Merge "Add YCBCR_P210 format" into main
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 34ff367..e9b757b 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -30,10 +30,8 @@
 
 # Only turn on clang-format check for the following subfolders.
 clang_format = --commit ${PREUPLOAD_COMMIT} --style file --extensions c,h,cc,cpp
-               camera/
                media/libaudioclient/tests/
                media/libaudiohal/tests/
                media/libmediatranscoding/
                services/camera/virtualcamera/
                services/mediatranscoding/
-               services/camera/libcameraservice/
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index db33dd7..8e53ca0 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -171,3 +171,10 @@
     description: "Use the context-provided AttributionSource when checking for client permissions"
     bug: "190657833"
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "check_full_attribution_source_chain"
+    description: "Pass the full AttributionSource chain to PermissionChecker"
+    bug: "190657833"
+}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 6862cb1..d0df90b 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -22,6 +22,11 @@
     default_applicable_licenses: ["frameworks_av_camera_license"],
 }
 
+vintf_fragment {
+    name: "manifest_android.frameworks.cameraservice.service.xml",
+    src: "manifest_android.frameworks.cameraservice.service.xml",
+}
+
 cc_binary {
     name: "cameraserver",
 
@@ -61,7 +66,7 @@
 
     init_rc: ["cameraserver.rc"],
 
-    vintf_fragments: [
+    vintf_fragment_modules: [
         "manifest_android.frameworks.cameraservice.service.xml",
     ],
 }
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 6d29ef5..53c4489 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -170,97 +170,78 @@
 }
 
 sp<hardware::ICameraService> CameraManagerGlobal::getCameraServiceLocked() {
-    if (mCameraService.get() == nullptr) {
-        if (CameraUtils::isCameraServiceDisabled()) {
-            return mCameraService;
-        }
+    if (mCameraService.get() != nullptr) {
+        return mCameraService;
+    }
+    if (CameraUtils::isCameraServiceDisabled()) {
+        return mCameraService;
+    }
 
-        sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder;
-        binder = sm->checkService(String16(kCameraServiceName));
-        if (binder == nullptr) {
-            ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+    sp<IServiceManager> sm = defaultServiceManager();
+    sp<IBinder> binder;
+    binder = sm->checkService(String16(kCameraServiceName));
+    if (binder == nullptr) {
+        ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+        return nullptr;
+    }
+    sp<hardware::ICameraService> cameraService = interface_cast<hardware::ICameraService>(binder);
+    if (mDeathNotifier == nullptr) {
+        mDeathNotifier = new DeathNotifier(this);
+        binder->linkToDeath(mDeathNotifier);
+    }
+
+    // Setup looper thread to perform availability callbacks
+    if (mCbLooper == nullptr) {
+        mCbLooper = new ALooper;
+        mCbLooper->setName("C2N-mgr-looper");
+        status_t err = mCbLooper->start(
+                /*runOnCallingThread*/false,
+                /*canCallJava*/       true,
+                PRIORITY_DEFAULT);
+        if (err != OK) {
+            ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
+                    __FUNCTION__, strerror(-err), err);
+            mCbLooper.clear();
             return nullptr;
         }
-        if (mDeathNotifier == nullptr) {
-            mDeathNotifier = new DeathNotifier(this);
+        if (mHandler == nullptr) {
+            mHandler = new CallbackHandler(this);
         }
-        binder->linkToDeath(mDeathNotifier);
-        mCameraService = interface_cast<hardware::ICameraService>(binder);
+        mCbLooper->registerHandler(mHandler);
+    }
 
-        // Setup looper thread to perfrom availiability callbacks
-        if (mCbLooper == nullptr) {
-            mCbLooper = new ALooper;
-            mCbLooper->setName("C2N-mgr-looper");
-            status_t err = mCbLooper->start(
-                    /*runOnCallingThread*/false,
-                    /*canCallJava*/       true,
-                    PRIORITY_DEFAULT);
-            if (err != OK) {
-                ALOGE("%s: Unable to start camera service listener looper: %s (%d)",
-                        __FUNCTION__, strerror(-err), err);
-                mCbLooper.clear();
-                return nullptr;
-            }
-            if (mHandler == nullptr) {
-                mHandler = new CallbackHandler(this);
-            }
-            mCbLooper->registerHandler(mHandler);
+    // register ICameraServiceListener
+    std::vector<hardware::CameraStatus> cameraStatuses{};
+    if (mCameraServiceListener == nullptr) {
+        mCameraServiceListener = new CameraServiceListener(this);
+        cameraService->addListener(mCameraServiceListener, &cameraStatuses);
+    }
+
+    for (auto& c : cameraStatuses) {
+        onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
+
+        for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
+            onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
+                                    c.deviceId, c.cameraId, unavailablePhysicalId);
         }
+    }
+    // setup vendor tags
+    if (!setupVendorTags(cameraService)) {
+        ALOGE("%s: Vendor tag descriptor cache couldn't be set up", __FUNCTION__);
+        return nullptr;
+    }
 
-        // register ICameraServiceListener
-        if (mCameraServiceListener == nullptr) {
-            mCameraServiceListener = new CameraServiceListener(this);
-        }
-        std::vector<hardware::CameraStatus> cameraStatuses{};
-        mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
-        for (auto& c : cameraStatuses) {
-            onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
+    mCameraService = cameraService;
+    ALOGE_IF(mCameraService == nullptr, "no CameraService!?");
+    return mCameraService;
+}
 
-            for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
-                onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
-                                      c.deviceId, c.cameraId, unavailablePhysicalId);
-            }
-        }
-
-        // setup vendor tags
-        sp<VendorTagDescriptor> desc = new VendorTagDescriptor();
-        binder::Status ret = mCameraService->getCameraVendorTagDescriptor(/*out*/desc.get());
-
-        if (ret.isOk()) {
-            if (0 < desc->getTagCount()) {
-                status_t err = VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
-                if (err != OK) {
-                    ALOGE("%s: Failed to set vendor tag descriptors, received error %s (%d)",
-                            __FUNCTION__, strerror(-err), err);
-                }
-            } else {
-                sp<VendorTagDescriptorCache> cache =
-                        new VendorTagDescriptorCache();
-                binder::Status res =
-                        mCameraService->getCameraVendorTagCache(
-                                /*out*/cache.get());
-                if (res.serviceSpecificErrorCode() ==
-                        hardware::ICameraService::ERROR_DISCONNECTED) {
-                    // No camera module available, not an error on devices with no cameras
-                    VendorTagDescriptorCache::clearGlobalVendorTagCache();
-                } else if (res.isOk()) {
-                    status_t err =
-                            VendorTagDescriptorCache::setAsGlobalVendorTagCache(
-                                    cache);
-                    if (err != OK) {
-                        ALOGE("%s: Failed to set vendor tag cache,"
-                                "received error %s (%d)", __FUNCTION__,
-                                strerror(-err), err);
-                    }
-                } else {
-                    VendorTagDescriptorCache::clearGlobalVendorTagCache();
-                    ALOGE("%s: Failed to setup vendor tag cache: %s",
-                            __FUNCTION__, res.toString8().c_str());
-                }
-            }
-        } else if (ret.serviceSpecificErrorCode() ==
-                hardware::ICameraService::ERROR_DEPRECATED_HAL) {
+bool CameraManagerGlobal::setupVendorTags(sp<hardware::ICameraService> &cameraService) {
+    sp<VendorTagDescriptor> desc = new VendorTagDescriptor();
+    binder::Status ret = cameraService->getCameraVendorTagDescriptor(/*out*/desc.get());
+    if (!ret.isOk()) {
+        if (ret.serviceSpecificErrorCode() ==
+            hardware::ICameraService::ERROR_DEPRECATED_HAL) {
             ALOGW("%s: Camera HAL too old; does not support vendor tags",
                     __FUNCTION__);
             VendorTagDescriptor::clearGlobalVendorTagDescriptor();
@@ -268,9 +249,45 @@
             ALOGE("%s: Failed to get vendor tag descriptors: %s",
                     __FUNCTION__, ret.toString8().c_str());
         }
+        return false;
     }
-    ALOGE_IF(mCameraService == nullptr, "no CameraService!?");
-    return mCameraService;
+
+    if (0 < desc->getTagCount()) {
+        status_t err = VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
+        if (err != OK) {
+            ALOGE("%s: Failed to set vendor tag descriptors, received error %s (%d)",
+                    __FUNCTION__, strerror(-err), err);
+            return false;
+        }
+    } else {
+        sp<VendorTagDescriptorCache> cache =
+                new VendorTagDescriptorCache();
+        binder::Status res =
+                cameraService->getCameraVendorTagCache(
+                        /*out*/cache.get());
+        if (res.serviceSpecificErrorCode() ==
+                hardware::ICameraService::ERROR_DISCONNECTED) {
+            // No camera module available, not an error on devices with no cameras
+            VendorTagDescriptorCache::clearGlobalVendorTagCache();
+        } else if (res.isOk()) {
+            status_t err =
+                    VendorTagDescriptorCache::setAsGlobalVendorTagCache(
+                            cache);
+            if (err != OK) {
+                ALOGE("%s: Failed to set vendor tag cache, "
+                        "received error %s (%d)", __FUNCTION__,
+                        strerror(-err), err);
+                return false;
+            }
+        } else {
+            VendorTagDescriptorCache::clearGlobalVendorTagCache();
+            ALOGE("%s: Failed to setup vendor tag cache: %s",
+                    __FUNCTION__, res.toString8().c_str());
+            return false;
+        }
+    }
+
+    return true;
 }
 
 void CameraManagerGlobal::DeathNotifier::binderDied(const wp<IBinder>&)
@@ -290,6 +307,8 @@
                                       key.cameraId);
         }
         cm->mCameraService.clear();
+        cm->mCameraServiceListener.clear();
+        cm->mDeathNotifier.clear();
         // TODO: consider adding re-connect call here?
     }
 }
@@ -398,6 +417,9 @@
 bool CameraManagerGlobal::supportsCamera2ApiLocked(const std::string &cameraId) {
     bool camera2Support = false;
     auto cs = getCameraServiceLocked();
+    if (cs == nullptr) {
+        return false;
+    }
     binder::Status serviceRet =
         cs->supportsCameraApi(cameraId,
                 hardware::ICameraService::API_VERSION_2, &camera2Support);
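
With this refactor getCameraServiceLocked() can return nullptr on more paths (camera service
disabled, service lookup failure, looper start failure, or vendor tag setup failure), so every
caller needs a guard like the one added to supportsCamera2ApiLocked() above. A minimal sketch of
the expected caller pattern; the surrounding function and return value are illustrative:

    // Sketch only: callers of getCameraServiceLocked() must tolerate a null result.
    sp<hardware::ICameraService> cs = getCameraServiceLocked();
    if (cs == nullptr) {
        ALOGE("%s: camera service is unavailable", __FUNCTION__);
        return false;  // or whatever error value fits the caller
    }
    // Safe to make binder calls on 'cs' from here on.
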
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index f4124ef..cb7a4ff 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -105,6 +105,8 @@
     template <class T>
     void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
 
+    bool setupVendorTags(sp<hardware::ICameraService> &cameraService);
+
     class DeathNotifier : public IBinder::DeathRecipient {
       public:
         explicit DeathNotifier(CameraManagerGlobal* cm) : mCameraManager(cm) {}
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 1817490..7fb2d05 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -480,10 +480,21 @@
      * camera device auto-exposure routine for the overridden
      * fields for a given capture will be available in its
      * CaptureResult.</p>
+     * <p>When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON and if the device
+     * supports manual flash strength control, i.e.,
+     * if ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1, then
+     * the auto-exposure (AE) precapture metering sequence should be
+     * triggered to avoid the image being incorrectly exposed at
+     * different ACAMERA_FLASH_STRENGTH_LEVEL.</p>
      *
+     * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_CONTROL_MODE
      * @see ACAMERA_FLASH_INFO_AVAILABLE
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_STRENGTH_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      * @see ACAMERA_SENSOR_FRAME_DURATION
      * @see ACAMERA_SENSOR_SENSITIVITY
@@ -8233,7 +8244,17 @@
      * ACAMERA_SENSOR_FRAME_DURATION are ignored. The
      * application has control over the various
      * ACAMERA_FLASH_* fields.</p>
+     * <p>If the device supports manual flash strength control, i.e.,
+     * if ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1, then
+     * the auto-exposure (AE) precapture metering sequence should be
+     * triggered for the configured flash mode and strength to avoid
+     * the image being incorrectly exposed at different
+     * ACAMERA_FLASH_STRENGTH_LEVEL.</p>
      *
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_STRENGTH_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      * @see ACAMERA_SENSOR_EXPOSURE_TIME
      * @see ACAMERA_SENSOR_FRAME_DURATION
      * @see ACAMERA_SENSOR_SENSITIVITY
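
On the NDK client side, the guidance added above amounts to setting the manual flash strength on
the request and running an AE precapture sequence before the still capture. A minimal sketch using
existing NdkCaptureRequest.h entry points; the helper function and the chosen values are
illustrative, and error handling is omitted:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Sketch only: prepare a capture that uses a manual flash strength, triggering the
    // AE precapture metering sequence first as the documentation above recommends.
    void prepareManualFlashCapture(ACaptureRequest* request, int32_t strengthLevel) {
        uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);

        uint8_t flashMode = ACAMERA_FLASH_MODE_SINGLE;
        ACaptureRequest_setEntry_u8(request, ACAMERA_FLASH_MODE, 1, &flashMode);

        // Only meaningful when ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL is greater than 1.
        ACaptureRequest_setEntry_i32(request, ACAMERA_FLASH_STRENGTH_LEVEL, 1, &strengthLevel);

        // Run AE precapture so exposure accounts for the configured flash strength
        // (typically submitted on a preview request ahead of the actual still capture).
        uint8_t trigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
        ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &trigger);
    }
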
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index cdba8ff..5b69f5c 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -194,11 +194,11 @@
     return (strncmp(value, "0", 2) != 0 && strncasecmp(value, "false", 6) != 0);
 }
 
-bool CameraManagerGlobal::setupVendorTags() {
+bool CameraManagerGlobal::setupVendorTags(std::shared_ptr<ICameraService> &cameraService) {
     sp<VendorTagDescriptorCache> tagCache = new VendorTagDescriptorCache();
     Status status = Status::NO_ERROR;
     std::vector<ProviderIdAndVendorTagSections> providerIdsAndVts;
-    ScopedAStatus remoteRet = mCameraService->getCameraVendorTagSections(&providerIdsAndVts);
+    ScopedAStatus remoteRet = cameraService->getCameraVendorTagSections(&providerIdsAndVts);
 
     if (!remoteRet.isOk()) {
         if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
@@ -261,15 +261,12 @@
         ALOGE("%s: Could not get ICameraService instance.", __FUNCTION__);
         return nullptr;
     }
-
     if (mDeathRecipient.get() == nullptr) {
         mDeathRecipient = ndk::ScopedAIBinder_DeathRecipient(
                 AIBinder_DeathRecipient_new(CameraManagerGlobal::binderDeathCallback));
+        AIBinder_linkToDeath(cameraService->asBinder().get(),
+                             mDeathRecipient.get(), /*cookie=*/ this);
     }
-    AIBinder_linkToDeath(cameraService->asBinder().get(),
-                         mDeathRecipient.get(), /*cookie=*/ this);
-
-    mCameraService = cameraService;
 
     // Setup looper thread to perform availability callbacks
     if (mCbLooper == nullptr) {
@@ -291,33 +288,25 @@
         mCbLooper->registerHandler(mHandler);
     }
 
+    std::vector<CameraStatusAndId> cameraStatuses;
     // register ICameraServiceListener
     if (mCameraServiceListener == nullptr) {
         mCameraServiceListener = ndk::SharedRefBase::make<CameraServiceListener>(weak_from_this());
-    }
-
-    std::vector<CameraStatusAndId> cameraStatuses;
-    Status status = Status::NO_ERROR;
-    ScopedAStatus remoteRet = mCameraService->addListener(mCameraServiceListener,
-                                                          &cameraStatuses);
-
-    if (!remoteRet.isOk()) {
-        if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
-            Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
-            ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
-                toString(errStatus).c_str());
-        } else {
-            ALOGE("%s: Transaction failed when adding listener to camera service: %d",
-                __FUNCTION__, remoteRet.getExceptionCode());
+        ScopedAStatus remoteRet = cameraService->addListener(mCameraServiceListener,
+                                                            &cameraStatuses);
+        if (!remoteRet.isOk()) {
+            if (remoteRet.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+                Status errStatus = static_cast<Status>(remoteRet.getServiceSpecificError());
+                ALOGE("%s: Failed to add listener to camera service: %s", __FUNCTION__,
+                    toString(errStatus).c_str());
+            } else {
+                ALOGE("%s: Transaction failed when adding listener to camera service: %d",
+                    __FUNCTION__, remoteRet.getExceptionCode());
+            }
+            return nullptr;
         }
     }
 
-    // Setup vendor tags
-    if (!setupVendorTags()) {
-        ALOGE("Unable to set up vendor tags");
-        return nullptr;
-    }
-
     for (auto& csi: cameraStatuses){
         onStatusChangedLocked(csi.deviceStatus, csi.cameraId);
 
@@ -326,6 +315,13 @@
                                   csi.cameraId, unavailablePhysicalId);
         }
     }
+
+    // Setup vendor tags
+    if (!setupVendorTags(cameraService)) {
+        ALOGE("Unable to set up vendor tags");
+        return nullptr;
+    }
+    mCameraService = cameraService;
     return mCameraService;
 }
 
@@ -346,6 +342,8 @@
         instance->onStatusChangedLocked(deviceStatus, cameraId);
     }
     instance->mCameraService.reset();
+    instance->mDeathRecipient.release();
+    instance->mCameraServiceListener.reset();
     // TODO: consider adding re-connect call here?
 }
 
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 2d8eefa..5688e76 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -188,7 +188,7 @@
                          const std::string &physicalCameraId);
     void onStatusChangedLocked(const CameraDeviceStatus &status, const std::string &cameraId,
                                const std::string &physicalCameraId);
-    bool setupVendorTags();
+    bool setupVendorTags(std::shared_ptr<ICameraService> &cameraService);
 
     // Utils for status
     static bool validStatus(CameraDeviceStatus status);
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index a5aeff2..47418cf 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -172,6 +172,12 @@
 }
 
 cc_aconfig_library {
+    name: "android.media.audio-aconfig-cc",
+    aconfig_declarations: "android.media.audio-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
+cc_aconfig_library {
     name: "android.media.audiopolicy-aconfig-cc",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["audio-aconfig-cc-defaults"],
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 411996e..0b434f7 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -1,5 +1,6 @@
 # Top level framework (android.media) flags
 # Only add flags here which must be included in framework.jar
+# Flags needed by both framework.jar and native code may also be added here
 #
 # Please add flags in alphabetical order.
 
@@ -22,6 +23,15 @@
     bug: "302323921"
 }
 
+flag {
+    name: "concurrent_audio_record_bypass_permission"
+    namespace: "media_audio"
+    description:
+        "New privileged permission to allow bypassing concurrent audio "
+        "capture rules."
+    bug: "374751406"
+}
+
 flag{
     name: "enable_ringtone_haptics_customization"
     namespace: "media_audio"
@@ -144,6 +154,14 @@
     is_fixed_read_only: true
 }
 
+flag {
+    name: "routed_device_ids"
+    namespace: "media_audio"
+    description:
+        "Enable Java and native functions to get "
+        "multiple routed device ids."
+    bug: "367816690"
+}
 
 # TODO remove
 flag {
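
The android.media.audio-aconfig-cc module added in the Android.bp hunk above is what makes these
framework flags readable from native code. A minimal sketch of such a check, assuming the standard
aconfig C++ codegen naming for the android.media.audio package (the header, namespace, and caller
are assumptions, not code from this change):

    #include <android_media_audio.h>

    // Sketch only: gate native support for multiple routed device ids on the new flag.
    bool useMultipleRoutedDeviceIds() {
        return android::media::audio::routed_device_ids();
    }
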
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 90996a3..cdcce08 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -1057,6 +1057,14 @@
     return OK;
 }
 
+namespace {
+    // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
+    // and universally administered
+    constexpr std::array<uint8_t, 4> BTANON_PREFIX {0xFD, 0xFF, 0xFF, 0xFF};
+    // Keep in sync with ServiceUtilities.cpp mustAnonymizeBluetoothAddress
+    constexpr const char * BTANON_PREFIX_STR = "XX:XX:XX:XX:";
+}
+
 ::android::status_t aidl2legacy_AudioDevice_audio_device(
         const AudioDevice& aidl,
         audio_devices_t* legacyType, std::string* legacyAddress) {
@@ -1071,8 +1079,16 @@
         case Tag::mac: {
             const std::vector<uint8_t>& mac = aidl.address.get<AudioDeviceAddress::mac>();
             if (mac.size() != 6) return BAD_VALUE;
-            snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
-                    mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+            if (std::equal(BTANON_PREFIX.begin(), BTANON_PREFIX.end(), mac.begin())) {
+                // special case for anonymized mac address:
+                // change anonymized bytes back from FD:FF:FF:FF: to XX:XX:XX:XX:
+                snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+                        "%s%02X:%02X", BTANON_PREFIX_STR, mac[4], mac[5]);
+            } else {
+                snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN,
+                        "%02X:%02X:%02X:%02X:%02X:%02X",
+                        mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
+            }
         } break;
         case Tag::ipv4: {
             const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1134,8 +1150,20 @@
         switch (suggestDeviceAddressTag(aidl.type)) {
             case Tag::mac: {
                 std::vector<uint8_t> mac(6);
-                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
-                        &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+                int status;
+                // special case for anonymized mac address:
+                // change anonymized bytes so that they can be scanned as HEX bytes
+                if (legacyAddress.starts_with(BTANON_PREFIX_STR)) {
+                    std::copy(BTANON_PREFIX.begin(), BTANON_PREFIX.end(), mac.begin());
+                    LOG_ALWAYS_FATAL_IF(legacyAddress.length() <= strlen(BTANON_PREFIX_STR));
+                    status = sscanf(legacyAddress.c_str() + strlen(BTANON_PREFIX_STR),
+                                        "%hhX:%hhX",
+                                        &mac[4], &mac[5]);
+                    status += 4;
+                } else {
+                    status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                            &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
+                }
                 if (status != mac.size()) {
                     ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
                     return unexpected(BAD_VALUE);
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 5d7daa4..d55932d 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -205,6 +205,7 @@
                   __func__, status, policyInfos.size());
         }
         const auto endTime = std::chrono::steady_clock::now();
+        af->startupFinished();
         using FloatMillis = std::chrono::duration<float, std::milli>;
         const float timeTaken = std::chrono::duration_cast<FloatMillis>(
                 endTime - startTime).count();
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 722b13a..93009c4 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -466,6 +466,7 @@
 
 aom_codec_err_t C2SoftAomEnc::setupCodecParameters() {
     aom_codec_err_t codec_return = AOM_CODEC_OK;
+    const int maxIntraBitratePct = mBitrateControlMode == AOM_CBR ? 300 : 450;
 
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_TARGET_SEQ_LEVEL_IDX, mAV1EncLevel);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
@@ -492,6 +493,10 @@
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_AQ_MODE, 3);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
 
+    codec_return = aom_codec_control(mCodecContext, AOME_SET_MAX_INTRA_BITRATE_PCT,
+                                     maxIntraBitratePct);
+    if (codec_return != AOM_CODEC_OK) goto BailOut;
+
     codec_return = aom_codec_control(mCodecContext, AV1E_SET_COEFF_COST_UPD_FREQ, 3);
     if (codec_return != AOM_CODEC_OK) goto BailOut;
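
For context on the values chosen above: AOME_SET_MAX_INTRA_BITRATE_PCT caps a keyframe's target
size as a percentage of the average per-frame budget, so the 300 (CBR) and 450 (otherwise) caps
allow a keyframe to spend roughly 3x or 4.5x the average frame size. A rough illustration of the
implied budget, with example numbers that are not taken from the encoder defaults:

    // Sketch only: approximate keyframe size cap implied by AOME_SET_MAX_INTRA_BITRATE_PCT.
    constexpr int kTargetBitrateBps   = 4'000'000;  // example: 4 Mbps target
    constexpr int kFrameRate          = 30;         // example: 30 fps
    constexpr int kMaxIntraBitratePct = 300;        // CBR case above

    constexpr int kAvgFrameBits    = kTargetBitrateBps / kFrameRate;             // ~133 kbit
    constexpr int kMaxKeyframeBits = kAvgFrameBits * kMaxIntraBitratePct / 100;  // ~400 kbit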
 
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 3c8c1b7..15a7a6a 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -16,6 +16,10 @@
 cc_library_shared {
     name: "libsfplugin_ccodec",
 
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
+
     export_include_dirs: ["include"],
 
     srcs: [
@@ -56,7 +60,6 @@
         "android.hardware.drm@1.0",
         "android.hardware.media.c2@1.0",
         "android.hardware.media.omx@1.0",
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbase",
         "libbinder",
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index a1551f8..f4637e3 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -66,6 +66,7 @@
         "libmedia_helper",
         "libmediametrics",
         "libprocessgroup",
+        "libprocessgroup_util",
         "mediametricsservice-aidl-cpp",
         "shared-file-region-aidl-cpp",
     ],
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 5b954f7..b193950 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -1159,11 +1159,10 @@
             // start of lock scope
             AutoMutex lock(mLock);
 
-            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
-                if (newSequence == oldSequence) {
+                if (mSequence == oldSequence) {
                     if (!audio_is_linear_pcm(mFormat)) {
                         // If compressed capture, don't attempt to restore the track.
                         // Return a DEAD_OBJECT error and let the caller recreate.
@@ -1179,7 +1178,7 @@
                     }
                 }
             }
-            oldSequence = newSequence;
+            oldSequence = mSequence;
 
             // Keep the extra references
             proxy = mProxy;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index ee44074..769475c 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -308,11 +308,11 @@
 }
 
 status_t AudioSystem::setStreamVolume(audio_stream_type_t stream, float value,
-                                      audio_io_handle_t output) {
+                                      bool muted, audio_io_handle_t output) {
     if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
     const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
-    af->setStreamVolume(stream, value, output);
+    af->setStreamVolume(stream, value, muted, output);
     return NO_ERROR;
 }
 
@@ -325,14 +325,15 @@
 }
 
 status_t AudioSystem::setPortsVolume(
-        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+        const std::vector<audio_port_handle_t>& portIds, float volume, bool muted,
+        audio_io_handle_t output) {
     const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
     std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
             convertContainer<std::vector<int32_t>>(
                     portIds, legacy2aidl_audio_port_handle_t_int32_t));
     int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-    af->setPortsVolume(portIdsAidl, volume, outputAidl);
+    af->setPortsVolume(portIdsAidl, volume, muted, outputAidl);
     return NO_ERROR;
 }
 
@@ -1096,7 +1097,8 @@
                                        std::vector<audio_io_handle_t>* secondaryOutputs,
                                        bool *isSpatialized,
                                        bool *isBitPerfect,
-                                       float *volume) {
+                                       float *volume,
+                                       bool *muted) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1163,6 +1165,7 @@
     *attr = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioAttributes_audio_attributes_t(responseAidl.attr));
     *volume = responseAidl.volume;
+    *muted = responseAidl.muted;
 
     return OK;
 }
@@ -1326,6 +1329,7 @@
 
 status_t AudioSystem::setStreamVolumeIndex(audio_stream_type_t stream,
                                            int index,
+                                           bool muted,
                                            audio_devices_t device) {
     const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -1336,7 +1340,7 @@
     AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     return statusTFromBinderStatus(
-            aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl));
+            aps->setStreamVolumeIndex(streamAidl, deviceAidl, indexAidl, muted));
 }
 
 status_t AudioSystem::getStreamVolumeIndex(audio_stream_type_t stream,
@@ -1360,6 +1364,7 @@
 
 status_t AudioSystem::setVolumeIndexForAttributes(const audio_attributes_t& attr,
                                                   int index,
+                                                  bool muted,
                                                   audio_devices_t device) {
     const sp<IAudioPolicyService> aps = get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
@@ -1370,7 +1375,7 @@
     AudioDeviceDescription deviceAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_devices_t_AudioDeviceDescription(device));
     return statusTFromBinderStatus(
-            aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl));
+            aps->setVolumeIndexForAttributes(attrAidl, deviceAidl, indexAidl, muted));
 }
 
 status_t AudioSystem::getVolumeIndexForAttributes(const audio_attributes_t& attr,
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index d7c0b5b..e0c5e92 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2221,11 +2221,10 @@
         {   // start of lock scope
             AutoMutex lock(mLock);
 
-            uint32_t newSequence = mSequence;
             // did previous obtainBuffer() fail due to media server death or voluntary invalidation?
             if (status == DEAD_OBJECT) {
                 // re-create track, unless someone else has already done so
-                if (newSequence == oldSequence) {
+                if (mSequence == oldSequence) {
                     status = restoreTrack_l("obtainBuffer");
                     if (status != NO_ERROR) {
                         buffer.mFrameCount = 0;
@@ -2235,7 +2234,7 @@
                     }
                 }
             }
-            oldSequence = newSequence;
+            oldSequence = mSequence;
 
             if (status == NOT_ENOUGH_DATA) {
                 restartIfDisabled();
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 9241973..168b47e 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -337,11 +337,12 @@
 }
 
 status_t AudioFlingerClientAdapter::setStreamVolume(audio_stream_type_t stream, float value,
-                                                    audio_io_handle_t output) {
+                                                    bool muted, audio_io_handle_t output) {
     AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
     int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-    return statusTFromBinderStatus(mDelegate->setStreamVolume(streamAidl, value, outputAidl));
+    return statusTFromBinderStatus(
+            mDelegate->setStreamVolume(streamAidl, value, muted, outputAidl));
 }
 
 status_t AudioFlingerClientAdapter::setStreamMute(audio_stream_type_t stream, bool muted) {
@@ -351,12 +352,14 @@
 }
 
 status_t AudioFlingerClientAdapter::setPortsVolume(
-        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+        const std::vector<audio_port_handle_t> &portIds, float volume, bool muted,
+        audio_io_handle_t output) {
     std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
             convertContainer<std::vector<int32_t>>(
                     portIds, legacy2aidl_audio_port_handle_t_int32_t));
     int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-    return statusTFromBinderStatus(mDelegate->setPortsVolume(portIdsAidl, volume, outputAidl));
+    return statusTFromBinderStatus(
+            mDelegate->setPortsVolume(portIdsAidl, volume, muted, outputAidl));
 }
 
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
@@ -1007,12 +1010,13 @@
 }
 
 Status AudioFlingerServerAdapter::setStreamVolume(AudioStreamType stream, float value,
-                                                  int32_t output) {
+                                                  bool muted, int32_t output) {
     audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
     audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_int32_t_audio_io_handle_t(output));
-    return Status::fromStatusT(mDelegate->setStreamVolume(streamLegacy, value, outputLegacy));
+    return Status::fromStatusT(
+            mDelegate->setStreamVolume(streamLegacy, value, muted, outputLegacy));
 }
 
 Status AudioFlingerServerAdapter::setStreamMute(AudioStreamType stream, bool muted) {
@@ -1022,13 +1026,14 @@
 }
 
 Status AudioFlingerServerAdapter::setPortsVolume(
-        const std::vector<int32_t>& portIds, float volume, int32_t output) {
+        const std::vector<int32_t>& portIds, float volume, bool muted, int32_t output) {
     std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
             convertContainer<std::vector<audio_port_handle_t>>(
                     portIds, aidl2legacy_int32_t_audio_port_handle_t));
     audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_int32_t_audio_io_handle_t(output));
-    return Status::fromStatusT(mDelegate->setPortsVolume(portIdsLegacy, volume, outputLegacy));
+    return Status::fromStatusT(
+            mDelegate->setPortsVolume(portIdsLegacy, volume, muted, outputLegacy));
 }
 
 Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index 4b26d5b..d3975c0 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -41,4 +41,6 @@
     AudioAttributes attr;
     /** initial port volume for the new audio track */
     float volume;
+    /** initial port muted state for the new audio track */
+    boolean muted;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 1c825bc..474ab11 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -96,15 +96,17 @@
     /*
      * Set stream type state. This will probably be used by
      * the preference panel, mostly.
+     * This method is deprecated. Please use the setPortsVolume method instead.
      */
-    void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
+    void setStreamVolume(AudioStreamType stream, float value, boolean muted,
+            int /* audio_io_handle_t */ output);
     void setStreamMute(AudioStreamType stream, boolean muted);
 
     /*
      * Set AudioTrack port ids volume attribute. This is the new way of controlling volume from
      * AudioPolicyManager to AudioFlinger.
      */
-    void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume,
+    void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume, boolean muted,
             int /* audio_io_handle_t */ output);
 
     // set audio mode.
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index b4f879a..40ab938 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -126,14 +126,14 @@
 
     void setStreamVolumeIndex(AudioStreamType stream,
                               in AudioDeviceDescription device,
-                              int index);
+                              int index, boolean muted);
 
     int getStreamVolumeIndex(AudioStreamType stream,
                              in AudioDeviceDescription device);
 
     void setVolumeIndexForAttributes(in AudioAttributes attr,
                                      in AudioDeviceDescription device,
-                                     int index);
+                                     int index, boolean muted);
 
     int getVolumeIndexForAttributes(in AudioAttributes attr,
                                     in AudioDeviceDescription device);
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 61d5ccd..a215c0b 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -30,6 +30,7 @@
         "libjsoncpp",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index a95c700..8bca8df 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -46,6 +46,7 @@
         "libmediametrics",
         "libmediametricsservice",
         "libprocessgroup",
+        "libprocessgroup_util",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index 710a656..b0b7e03 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -511,11 +511,11 @@
 
         stream = getValue(&mFdp, kStreamtypes);
         AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
-                                     mFdp.ConsumeIntegral<int32_t>());
+                                     mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int32_t>());
     } else {
         std::vector <audio_port_handle_t> portsForVolumeChange{};
         AudioSystem::setPortsVolume(portsForVolumeChange, mFdp.ConsumeFloatingPoint<float>(),
-                                    mFdp.ConsumeIntegral<int32_t>());
+                                    mFdp.ConsumeBool(), mFdp.ConsumeIntegral<int32_t>());
     }
     audio_mode_t mode = getValue(&mFdp, kModes);
     AudioSystem::setMode(mode);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 40e5673..31e4f05 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -126,7 +126,7 @@
 
     // set stream volume on specified output
     static status_t setStreamVolume(audio_stream_type_t stream, float value,
-                                    audio_io_handle_t output);
+                                    bool muted, audio_io_handle_t output);
 
     // mute/unmute stream
     static status_t setStreamMute(audio_stream_type_t stream, bool mute);
@@ -135,11 +135,12 @@
      * Set volume for given AudioTrack port ids on specified output
      * @param portIds to consider
      * @param volume to set
+     * @param muted to set
      * @param output to consider
      * @return NO_ERROR if successful
      */
     static status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds,
-                                   float volume, audio_io_handle_t output);
+                                   float volume, bool muted, audio_io_handle_t output);
 
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
@@ -345,7 +346,8 @@
                                      std::vector<audio_io_handle_t> *secondaryOutputs,
                                      bool *isSpatialized,
                                      bool *isBitPerfect,
-                                     float *volume);
+                                     float *volume,
+                                     bool *muted);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
@@ -391,6 +393,7 @@
                                      int indexMax);
     static status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                          int index,
+                                         bool muted,
                                          audio_devices_t device);
     static status_t getStreamVolumeIndex(audio_stream_type_t stream,
                                          int *index,
@@ -398,6 +401,7 @@
 
     static status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                 int index,
+                                                bool muted,
                                                 audio_devices_t device);
     static status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                 int &index,
diff --git a/media/libaudioclient/include/media/AudioTimestamp.h b/media/libaudioclient/include/media/AudioTimestamp.h
index e5925dd..45d5595 100644
--- a/media/libaudioclient/include/media/AudioTimestamp.h
+++ b/media/libaudioclient/include/media/AudioTimestamp.h
@@ -154,10 +154,13 @@
     std::string toString() const {
         std::stringstream ss;
 
-        ss << "BOOTTIME offset " << mTimebaseOffset[TIMEBASE_BOOTTIME] << "\n";
+        ss << "BOOTTIME offset " << mTimebaseOffset[TIMEBASE_BOOTTIME] << ": ExtendedTimestamp: ";
         for (int i = 0; i < LOCATION_MAX; ++i) {
-            ss << "ExtendedTimestamp[" << i << "]  position: "
-                    << mPosition[i] << "  time: "  << mTimeNs[i] << "\n";
+            ss << "([" << i << "]  position: "
+                    << mPosition[i] << "  time: "  << mTimeNs[i] << ")";
+            if (i != LOCATION_MAX - 1) {
+                ss << ", ";
+            }
         }
         return ss.str();
     }
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index a5f3217..21ecb09 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -226,18 +226,19 @@
      * the preference panel, mostly.
      */
     virtual     status_t    setStreamVolume(audio_stream_type_t stream, float value,
-                                    audio_io_handle_t output) = 0;
+                                    bool muted, audio_io_handle_t output) = 0;
     virtual     status_t    setStreamMute(audio_stream_type_t stream, bool muted) = 0;
 
     /**
      * Set volume for given AudioTrack port ids on specified output
      * @param portIds to consider
      * @param volume to set
+     * @param muted to set
      * @param output to consider
      * @return NO_ERROR if successful
      */
     virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
-            audio_io_handle_t output) = 0;
+            bool muted, audio_io_handle_t output) = 0;
 
     // set audio mode
     virtual     status_t    setMode(audio_mode_t mode) = 0;
@@ -428,10 +429,10 @@
     status_t setMasterBalance(float balance) override;
     status_t getMasterBalance(float* balance) const override;
     status_t setStreamVolume(audio_stream_type_t stream, float value,
-                             audio_io_handle_t output) override;
+                             bool muted, audio_io_handle_t output) override;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
     status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
-            audio_io_handle_t output) override;
+            bool muted, audio_io_handle_t output) override;
     status_t setMode(audio_mode_t mode) override;
     status_t setMicMute(bool state) override;
     bool getMicMute() const override;
@@ -675,10 +676,10 @@
     Status setMasterBalance(float balance) override;
     Status getMasterBalance(float* _aidl_return) override;
     Status setStreamVolume(media::audio::common::AudioStreamType stream,
-                           float value, int32_t output) override;
+                           float value, bool muted, int32_t output) override;
     Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
-    Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, int32_t output)
-            override;
+    Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, bool muted,
+                          int32_t output) override;
     Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 7f55e48..2076045 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -483,8 +483,27 @@
                                  AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
                                          std::vector<int32_t>{1, 2}))));
 
+TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
+    const std::vector<uint8_t> sAnonymizedAidlAddress {0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
+    const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
+    auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
+                                                       sAnonymizedLegacyAddress);
+    ASSERT_TRUE(device.ok());
+    ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
+    ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
+
+    audio_devices_t legacyType;
+    std::string legacyAddress;
+    status_t status =
+            aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
+    ASSERT_EQ(OK, status);
+    EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
+    EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
+}
+
 class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
 };
+
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index d65701a..c4e4ae8 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -443,9 +443,29 @@
     if (!mStream) return NO_INIT;
 
     if (const auto state = getState(); isInPlayOrRecordState(state)) {
-        return sendCommand(
-                makeHalCommand<HalCommand::Tag::pause>(), reply,
+        StreamDescriptor::Reply localReply{};
+        StreamDescriptor::Reply* innerReply = reply ?: &localReply;
+        auto status = sendCommand(
+                makeHalCommand<HalCommand::Tag::pause>(), innerReply,
                 true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+        if (status == STATUS_INVALID_OPERATION &&
+                !isInPlayOrRecordState(innerReply->state)) {
+            /**
+             * For transient states like DRAINING, the HAL may change its
+             * StreamDescriptor::State on its own and fall out of sync with the client.
+             * The client can then send a command the HAL no longer expects, and the HAL
+             * returns a failure. Such a failure is expected and is handled gracefully here.
+             * Examples where the HAL changes its state on its own:
+             * 1) DRAINING -> IDLE (on empty buffer)
+             * 2) DRAINING -> IDLE (on IStreamCallback::onDrainReady)
+             **/
+            AUGMENT_LOG(D,
+                        "HAL failed to handle the 'pause' command, but stream state is in one of"
+                        " the PAUSED kind of states, current state: %s",
+                        toString(state).c_str());
+            return OK;
+        }
+        return status;
     } else {
         AUGMENT_LOG(D, "already stream in one of the PAUSED kind of states, current state: %s",
                 toString(state).c_str());
@@ -473,13 +493,9 @@
                 return INVALID_OPERATION;
             }
             return OK;
-        } else if (state == StreamDescriptor::State::PAUSED ||
-                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
-                   state == StreamDescriptor::State::DRAIN_PAUSED) {
+        } else if (isInPausedState(state)) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
-        } else if (state == StreamDescriptor::State::ACTIVE ||
-                   state == StreamDescriptor::State::TRANSFERRING ||
-                   state == StreamDescriptor::State::DRAINING) {
+        } else if (isInPlayOrRecordState(state)) {
             AUGMENT_LOG(D, "already in stream state: %s", toString(state).c_str());
             return OK;
         } else {
@@ -528,7 +544,14 @@
 }
 
 void StreamHalAidl::onAsyncTransferReady() {
-    if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
+    StreamDescriptor::State state;
+    {
+        // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
+        // after the reply from the 'burst' command.
+        std::lock_guard l(mCommandReplyLock);
+        state = getState();
+    }
+    if (state == StreamDescriptor::State::TRANSFERRING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
@@ -539,7 +562,14 @@
 }
 
 void StreamHalAidl::onAsyncDrainReady() {
-    if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
+    StreamDescriptor::State state;
+    {
+        // Use 'mCommandReplyLock' to ensure that 'sendCommand' has finished updating the state
+        // after the reply from the 'drain' command.
+        std::lock_guard l(mCommandReplyLock);
+        state = getState();
+    }
+    if (state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
diff --git a/media/libmedia/xsd/vts/Android.bp b/media/libmedia/xsd/vts/Android.bp
index 83ab977..add7b51 100644
--- a/media/libmedia/xsd/vts/Android.bp
+++ b/media/libmedia/xsd/vts/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_android_kernel",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 10a1da7..761137e 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2514,6 +2514,15 @@
     {
         Mutex::Autolock lock(mLock);
         track = mTrack;
+    }
+
+    // do not hold lock while joining.
+    if (track) {
+        track->stopAndJoinCallbacks();
+    }
+
+    {
+        Mutex::Autolock lock(mLock);
         close_l(); // clears mTrack
     }
     // destruction of the track occurs outside of mutex.
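
The reordering above follows the usual teardown rule for an AudioTrack with callbacks: take a
reference while holding the lock, join the callbacks with the lock released (the callback thread
may itself need that lock), then re-acquire the lock to clear the member. A compact sketch of the
idiom with names mirroring the hunk; the wrapper function itself is illustrative:

    // Sketch only: join AudioTrack callbacks without holding the object lock.
    void closeTrackSafely() {
        sp<AudioTrack> track;
        {
            Mutex::Autolock lock(mLock);
            track = mTrack;                 // grab a local reference under the lock
        }
        if (track != nullptr) {
            track->stopAndJoinCallbacks();  // join outside the lock to avoid deadlock
        }
        {
            Mutex::Autolock lock(mLock);
            close_l();                      // clears mTrack under the lock
        }
        // 'track' going out of scope destroys the AudioTrack outside the mutex.
    }
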
diff --git a/media/libstagefright/xmlparser/vts/Android.bp b/media/libstagefright/xmlparser/vts/Android.bp
index 1e36c8f..527230c 100644
--- a/media/libstagefright/xmlparser/vts/Android.bp
+++ b/media/libstagefright/xmlparser/vts/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_android_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index d916fd1..b5124d0 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -51,6 +51,11 @@
     ],
 }
 
+vintf_fragment {
+    name: "manifest_media_c2_software.xml",
+    src: "manifest_media_c2_software.xml",
+}
+
 mediaserver_cc_binary {
     name: "mediaserver",
 
@@ -88,7 +93,7 @@
         "-Wall",
     ],
 
-    vintf_fragments: ["manifest_media_c2_software.xml"],
+    vintf_fragment_modules: ["manifest_media_c2_software.xml"],
 
     soong_config_variables: {
         TARGET_DYNAMIC_64_32_MEDIASERVER: {
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
index 5c1a010..8b273f3 100644
--- a/media/module/aidlpersistentsurface/Android.bp
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -3,6 +3,9 @@
     unstable: true,
     local_include_dir: "aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-latest"
+    ],
     srcs: [
         "aidl/android/media/AidlColorAspects.aidl",
         "aidl/android/media/IAidlGraphicBufferSource.aidl",
@@ -12,9 +15,6 @@
     headers: [
         "HardwareBuffer_aidl",
     ],
-    imports: [
-        "android.hardware.graphics.common-V5",
-    ],
     include_dirs: [
         "frameworks/native/aidl/gui",
     ],
@@ -41,6 +41,9 @@
 cc_library_shared {
     name: "libstagefright_graphicbuffersource_aidl",
     min_sdk_version: "29",
+    defaults: [
+        "android.hardware.graphics.common-ndk_shared",
+    ],
     srcs: [
         "AidlGraphicBufferSource.cpp",
         "wrapper/WAidlGraphicBufferSource.cpp",
@@ -56,7 +59,6 @@
         "media_plugin_headers",
     ],
     shared_libs: [
-        "android.hardware.graphics.common-V5-ndk",
         "graphicbuffersource-aidl-ndk",
         "libbinder_ndk",
         "libcutils",
diff --git a/media/module/codecs/amrwb/enc/Android.bp b/media/module/codecs/amrwb/enc/Android.bp
index 04f36b5..6ca3b6e 100644
--- a/media/module/codecs/amrwb/enc/Android.bp
+++ b/media/module/codecs/amrwb/enc/Android.bp
@@ -96,8 +96,6 @@
                 "-DARM",
                 "-DARMV7",
                 "-DASM_OPT",
-                // don't actually generate neon instructions, see bug 26932980
-                "-mfpu=vfpv3",
             ],
             local_include_dirs: [
                 "src/asm/ARMV7",
diff --git a/media/module/extractors/Android.bp b/media/module/extractors/Android.bp
index f654ecd..e29d3e6 100644
--- a/media/module/extractors/Android.bp
+++ b/media/module/extractors/Android.bp
@@ -28,6 +28,10 @@
         "liblog",
     ],
 
+    static_libs: [
+        "libstagefright_metadatautils",
+    ],
+
     // extractors are expected to run on Q(29)
     min_sdk_version: "29",
     apex_available: [
@@ -56,6 +60,7 @@
                 "libutils",
                 "libmediandk_format",
                 "libmedia_ndkformatpriv",
+                "libstagefright_metadatautils",
             ],
         },
     },
@@ -68,3 +73,21 @@
         ],
     },
 }
+
+aconfig_declarations {
+    name: "android.media.extractor.flags-aconfig",
+    package: "com.android.media.extractor.flags",
+    container: "com.android.media",
+    srcs: ["extractor.aconfig"],
+}
+
+cc_aconfig_library {
+    name: "android.media.extractor.flags-aconfig-cc",
+    aconfig_declarations: "android.media.extractor.flags-aconfig",
+    host_supported: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+    ],
+}
diff --git a/media/module/extractors/extractor.aconfig b/media/module/extractors/extractor.aconfig
new file mode 100644
index 0000000..c9bf694
--- /dev/null
+++ b/media/module/extractors/extractor.aconfig
@@ -0,0 +1,14 @@
+# Media Extractor flags.
+#
+# !!! Please add flags in alphabetical order. !!!
+package: "com.android.media.extractor.flags"
+container: "com.android.media"
+
+flag {
+    name: "extractor_sniff_midi_optimizations"
+    is_exported: true
+    is_fixed_read_only: true
+    namespace: "media_extractor"
+    description: "Enable SniffMidi optimizations."
+    bug: "359920208"
+}
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index 7a49d8e..3da1589 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -24,6 +24,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_license"],
+    default_team: "trendy_team_android_media_solutions_playback",
 }
 
 cc_defaults {
@@ -131,6 +132,7 @@
         "libstagefright_id3",
         "libstagefright_esds",
         "libmp4extractor",
+        "libstagefright_metadatautils",
     ],
 
     dictionary: "mp4_extractor_fuzzer.dict",
@@ -301,12 +303,18 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libsonivox",
         "libmedia_midiiowrapper",
         "libmidiextractor",
         "libwatchdog",
     ],
 
+    shared_libs: [
+        "server_configurable_flags",
+    ],
+
     dictionary: "midi_extractor_fuzzer.dict",
 
     host_supported: true,
diff --git a/media/module/extractors/midi/Android.bp b/media/module/extractors/midi/Android.bp
index feabf9e..0eb34fc 100644
--- a/media/module/extractors/midi/Android.bp
+++ b/media/module/extractors/midi/Android.bp
@@ -32,6 +32,8 @@
     ],
 
     static_libs: [
+        "android.media.extractor.flags-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libmedia_midiiowrapper",
         "libsonivoxwithoutjet",
         "libstagefright_foundation",
@@ -40,6 +42,7 @@
 
     shared_libs: [
         "libbase",
+        "server_configurable_flags",
     ],
 
     host_supported: true,
diff --git a/media/module/extractors/midi/MidiExtractor.cpp b/media/module/extractors/midi/MidiExtractor.cpp
index 167cc40..98d7716 100644
--- a/media/module/extractors/midi/MidiExtractor.cpp
+++ b/media/module/extractors/midi/MidiExtractor.cpp
@@ -20,6 +20,7 @@
 
 #include "MidiExtractor.h"
 
+#include <com_android_media_extractor_flags.h>
 #include <media/MidiIoWrapper.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/MediaBufferGroup.h>
@@ -323,10 +324,97 @@
     return AMediaFormat_copy(meta, mFileMetadata);
 }
 
-// Sniffer
+static bool startsWith(const uint8_t *buf, size_t size, const char *pattern, size_t patternSize) {
+    if (size < patternSize) {
+        return false;
+    }
+    return (memcmp(buf, pattern, patternSize) == 0);
+}
 
-bool SniffMidi(CDataSource *source, float *confidence)
-{
+static bool isValidMThd(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "MThd", 4);
+}
+
+static bool isValidXmf(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "XMF_", 4);
+}
+
+static bool isValidImelody(const uint8_t *buf, size_t size) {
+    return startsWith(buf, size, "BEGIN:IMELODY", 13);
+}
+
+static bool isValidRtttl(const uint8_t *buf, size_t size) {
+    #define RTTTL_MAX_TITLE_LEN 32
+    // rtttl starts with the following:
+    // <title>:<type>=<value>
+    //
+    // Where:
+    // - <title>: Up to 32 characters
+    // - <type>: Single character indicating:
+    //     'd' for duration
+    //     'o' for octave
+    //     'b' for beats per minute
+    // - <value>: Corresponding value for the type
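+    //
+    // Illustrative header (assumed example, not from a real file):
+    //   "fifth:d=4,o=5,b=63:8P,8G5,8G5,8G5,2D#5"
+    // Only the "<title>:<type>=" prefix is checked below.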
+    if (size < 4) {
+        return false;
+    }
+    for (size_t i = 0; i < RTTTL_MAX_TITLE_LEN && i < size; i++) {
+        if (buf[i] == ':') {
+            if (i < (size - 3) && buf[i + 2] == '=') {
+                return true;
+            }
+            break;
+        }
+    }
+    return false;
+}
+
+static bool isValidOta(const uint8_t *buf, size_t size) {
+    #define OTA_RINGTONE 0x25
+    #define OTA_SOUND 0x1d
+    #define OTA_UNICODE 0x22
+
+    // ota starts with the following:
+    // <cmdLen><cmd1><cmd2>..<cmdN>
+    //
+    // Where:
+    // - <cmdLen>: Single character command length
+    // - <cmd1>: Single character (OTA_RINGTONE << 1)
+    // - <cmd2>: Single character (OTA_SOUND << 1) or (OTA_UNICODE << 1)
+    //           and so on with last cmd being (0x1d << 1)
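+    //
+    // Illustrative minimal prefix that would pass this check (assumed bytes):
+    //   0x02 0x4a 0x3a  i.e. cmdLen=2, (OTA_RINGTONE << 1), (OTA_SOUND << 1)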
+
+    if (size < 3) {
+        return false;
+    }
+
+    uint8_t cmdLen = buf[0];
+    if (cmdLen < 2) {
+        return false;
+    }
+
+    if ((buf[1] >> 1) != OTA_RINGTONE) {
+        return false;
+    }
+    cmdLen--;
+
+    size_t i = 2;
+    while (cmdLen && i < size) {
+        switch (buf[i] >> 1) {
+            case OTA_SOUND:
+                return true;
+            case OTA_UNICODE:
+                break;
+            default:
+                return false;
+        }
+        cmdLen--;
+        i++;
+    }
+
+    return false;
+}
+
+bool SniffMidiLegacy(CDataSource *source, float *confidence) {
     MidiEngine p(source, NULL, NULL);
     if (p.initCheck() == OK) {
         *confidence = 0.8;
@@ -335,7 +423,47 @@
     }
     ALOGV("SniffMidi: no");
     return false;
+}
 
+bool SniffMidiEfficiently(CDataSource *source, float *confidence) {
+    uint8_t header[128];
+    int filled = source->readAt(source->handle, 0, header, sizeof(header));
+    if (filled <= 0) {
+        ALOGV("SniffMidi: no, read failed");
+        return false;
+    }
+
+    if (isValidMThd(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, MThd");
+        return true;
+    }
+    if (isValidXmf(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, XMF_");
+        return true;
+    }
+    if (isValidImelody(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, imelody");
+        return true;
+    }
+    if (isValidRtttl(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, rtttl");
+        return true;
+    }
+    if (isValidOta(header, filled)) {
+        *confidence = 0.80;
+        ALOGV("SniffMidi: yes, ota");
+        return true;
+    }
+    ALOGV("SniffMidi: no");
+    return false;
+}
+
+// Sniffer
+bool SniffMidi(CDataSource *source, float *confidence) {
+    if (com::android::media::extractor::flags::extractor_sniff_midi_optimizations()) {
+        return SniffMidiEfficiently(source, confidence);
+    }
+    return SniffMidiLegacy(source, confidence);
 }
 
 static const char *extensions[] = {
diff --git a/media/module/extractors/mkv/MatroskaExtractor.cpp b/media/module/extractors/mkv/MatroskaExtractor.cpp
index f326db1..10ae07a 100644
--- a/media/module/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/module/extractors/mkv/MatroskaExtractor.cpp
@@ -1787,7 +1787,7 @@
         return ERROR_MALFORMED;
     }
 
-    if (!MakeVP9CodecSpecificData(trackInfo->mMeta, tmpData.get(), frame.len)) {
+    if (!MakeVP9CodecSpecificDataFromFirstFrame(trackInfo->mMeta, tmpData.get(), frame.len)) {
         return ERROR_MALFORMED;
     }
 
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index f247f8c..12c0aaf 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -51,6 +51,7 @@
 #include <media/stagefright/MediaBufferGroup.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MetaDataBase.h>
+#include <media/stagefright/MetaDataUtils.h>
 #include <utils/String8.h>
 
 #include <byteswap.h>
@@ -2596,8 +2597,32 @@
             *offset += chunk_size;
             break;
         }
-
         case FOURCC("vpcC"):
+        {
+            if (mLastTrack == NULL) {
+                return ERROR_MALFORMED;
+            }
+
+            auto buffer = heapbuffer<uint8_t>(chunk_data_size);
+
+            if (buffer.get() == NULL) {
+                ALOGE("b/28471206");
+                return NO_MEMORY;
+            }
+
+            if (mDataSource->readAt(data_offset, buffer.get(), chunk_data_size) < chunk_data_size) {
+                return ERROR_IO;
+            }
+
+            if (!MakeVP9CodecPrivateFromVpcC(mLastTrack->meta, buffer.get(), chunk_data_size)) {
+                ALOGE("Failed to create VP9 CodecPrivate from vpcC.");
+                return ERROR_MALFORMED;
+            }
+
+            *offset += chunk_size;
+            break;
+        }
+
         case FOURCC("av1C"):
         {
             auto buffer = heapbuffer<uint8_t>(chunk_data_size);
diff --git a/media/module/metadatautils/MetaDataUtils.cpp b/media/module/metadatautils/MetaDataUtils.cpp
index 0895bb5..177438a 100644
--- a/media/module/metadatautils/MetaDataUtils.cpp
+++ b/media/module/metadatautils/MetaDataUtils.cpp
@@ -134,10 +134,54 @@
     }
     return true;
 }
+
+/**
+ * Build VP9 Codec Feature Metadata (CodecPrivate) to set CSD for VP9 codec.
+ * For reference:
+ * https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate.
+ *
+ * @param meta          A pointer to AMediaFormat object.
+ * @param profile       The profile value of the VP9 stream.
+ * @param level         The VP9 codec level. If the level is unknown, pass -1 to this parameter.
+ * @param bitDepth      The bit depth of the luma and color components of the VP9 stream.
+ * @param chromaSubsampling  The chroma subsampling of the VP9 stream. If chromaSubsampling is
+ *                           unknown, pass -1 to this parameter.
+ * @return true if CodecPrivate is set as CSD of AMediaFormat object.
+ *
+ */
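+// Illustrative output (assumed inputs, not from a real stream): profile=2, level=-1
+// (omitted), bitDepth=10, chromaSubsampling=0 yields the CodecPrivate bytes
+// 01 01 02 | 03 01 0a | 04 01 00 (ID | Length | Value triplets).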
+static bool MakeVP9CodecPrivate(AMediaFormat* meta, int32_t profile, int32_t level,
+                                int32_t bitDepth, int32_t chromaSubsampling) {
+    if (meta == nullptr) {
+        return false;
+    }
+
+    std::vector<uint8_t> codecPrivate;
+    // Construct CodecPrivate in WebM format (ID | Length | Data).
+    // Helper lambda to add a field to the codec private data
+    auto addField = [&codecPrivate](uint8_t id, uint8_t value) {
+        codecPrivate.push_back(id);
+        codecPrivate.push_back(0x01);  // Length is always 1
+        codecPrivate.push_back(value);
+    };
+
+    // Add fields
+    addField(0x01, static_cast<uint8_t>(profile));
+    if (level >= 0) {
+        addField(0x02, static_cast<uint8_t>(level));
+    }
+    addField(0x03, static_cast<uint8_t>(bitDepth));
+    if (chromaSubsampling >= 0) {
+        addField(0x04, static_cast<uint8_t>(chromaSubsampling));
+    }
+    // Set CSD in the meta format
+    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, codecPrivate.data(), codecPrivate.size());
+    return true;
+}
+
 // The param data contains the first frame data, starting with the uncompressed frame
 // header. This uncompressed header (refer section 6.2 of the VP9 bitstream spec) is
 // used to parse profile, bitdepth and subsampling.
-bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size) {
+bool MakeVP9CodecSpecificDataFromFirstFrame(AMediaFormat* meta, const uint8_t* data, size_t size) {
     if (meta == nullptr || data == nullptr || size == 0) {
         return false;
     }
@@ -227,29 +271,29 @@
     if (chromaSubsampling != -1) {
         csdSize += 3;
     }
+    // As the level is not present in the first frame, build CodecPrivate without it.
+    return MakeVP9CodecPrivate(meta, profile, -1, bitDepth, chromaSubsampling);
+}
 
-    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed
-    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
-    sp<ABuffer> csd = sp<ABuffer>::make(csdSize);
-    uint8_t* csdData = csd->data();
-
-    *csdData++ = 0x01 /* FEATURE PROFILE */;
-    *csdData++ = 0x01 /* length */;
-    *csdData++ = profile;
-
-    *csdData++ = 0x03 /* FEATURE BITDEPTH */;
-    *csdData++ = 0x01 /* length */;
-    *csdData++ = bitDepth;
-
-    // csdSize more than 6 means chroma subsampling data was found.
-    if (csdSize > 6) {
-        *csdData++ = 0x04 /* FEATURE SUBSAMPLING */;
-        *csdData++ = 0x01 /* length */;
-        *csdData++ = chromaSubsampling;
+bool MakeVP9CodecPrivateFromVpcC(AMediaFormat* meta, const uint8_t* csdData, size_t size) {
+    if (meta == nullptr || csdData == nullptr || size < 12) {
+        return false;
     }
 
-    AMediaFormat_setBuffer(meta, AMEDIAFORMAT_KEY_CSD_0, csd->data(), csd->size());
-    return true;
+    // Check the first 4 bytes (VersionAndFlags) if they match the required value.
+    if (csdData[0] != 0x01 || csdData[1] != 0x00 || csdData[2] != 0x00 || csdData[3] != 0x00) {
+        return false;
+    }
+
+    // Create VP9 Codec Feature Metadata (CodecPrivate) that can be parsed.
+    // https://www.webmproject.org/docs/container/#vp9-codec-feature-metadata-codecprivate
+    const uint8_t* vpcCData = csdData + 4;  // Skip the first 4 bytes (VersionAndFlags)
+
+    int32_t profile = vpcCData[0];
+    int32_t level = vpcCData[1];
+    int32_t bitDepth = (vpcCData[2] >> 4) & 0x0F;           // Bit Depth (4 bits).
+    int32_t chromaSubsampling = (vpcCData[2] >> 1) & 0x07;  // Chroma Subsampling (3 bits).
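+    // Illustrative example (assumed bytes): vpcCData = {0x02, 0x1f, 0xa2, ...} parses as
+    // profile=2, level=31, bitDepth=10, chromaSubsampling=1.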
+    return MakeVP9CodecPrivate(meta, profile, level, bitDepth, chromaSubsampling);
 }
 
 bool MakeAACCodecSpecificData(MetaDataBase &meta, const uint8_t *data, size_t size) {
diff --git a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
index 69cf21a..9988544 100644
--- a/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
+++ b/media/module/metadatautils/include/media/stagefright/MetaDataUtils.h
@@ -38,7 +38,10 @@
 void parseVorbisComment(
         AMediaFormat *fileMeta, const char *comment, size_t commentLength);
 
-bool MakeVP9CodecSpecificData(AMediaFormat* meta, const uint8_t* data, size_t size);
+bool MakeVP9CodecSpecificData(AMediaFormat* meta, int32_t csdSize, int32_t profile, int32_t level,
+                              int32_t bitDepth, int32_t chromaSubsampling);
+bool MakeVP9CodecSpecificDataFromFirstFrame(AMediaFormat* meta, const uint8_t* data, size_t size);
+bool MakeVP9CodecPrivateFromVpcC(AMediaFormat* meta, const uint8_t* data, size_t size);
 
 }  // namespace android
 
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 979edab..26e5ddf 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -20,7 +20,6 @@
 #include <dirent.h>
 #include <errno.h>
 #include <fcntl.h>
-#include <memory>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
@@ -298,9 +297,11 @@
 
 void MtpFfsHandle::close() {
     // Join all child threads before destruction
-    for (auto& thread : mChildThreads) {
-        thread.join();
+    int count = mChildThreads.size();
+    for (int i = 0; i < count; i++) {
+        mChildThreads[i].join();
     }
+    mChildThreads.clear();
 
     io_destroy(mCtx);
     closeEndpoints();
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java
new file mode 100644
index 0000000..3b3640e
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/BlockModelDecoder.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.benchmark.library;
+
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import com.android.media.benchmark.library.Decoder;
+
+public class BlockModelDecoder extends Decoder {
+    private static final String TAG = BlockModelDecoder.class.getSimpleName();
+    private final boolean DEBUG = false;
+    protected final LinearBlockWrapper mLinearInputBlock = new LinearBlockWrapper();
+
+    /**
+     * Wrapper class for {@link MediaCodec.LinearBlock}
+     */
+    public static class LinearBlockWrapper {
+        private MediaCodec.LinearBlock mBlock;
+        private ByteBuffer mBuffer;
+        private int mOffset;
+
+        public MediaCodec.LinearBlock getBlock() {
+            return mBlock;
+        }
+
+        public ByteBuffer getBuffer() {
+            return mBuffer;
+        }
+
+        public int getBufferCapacity() {
+            return mBuffer == null ? 0 : mBuffer.capacity();
+        }
+
+        public int getOffset() {
+            return mOffset;
+        }
+
+        public void setOffset(int size) {
+            mOffset = size;
+        }
+
+        public boolean allocateBlock(String codec, int size) throws RuntimeException {
+            recycle();
+            mBlock = MediaCodec.LinearBlock.obtain(size, new String[]{codec});
+            if (mBlock == null || !mBlock.isMappable()) {
+                throw new RuntimeException("Linear Block not allocated/mapped");
+            }
+            mBuffer = mBlock.map();
+            mOffset = 0;
+            return true;
+        }
+
+        public void recycle() {
+            if (mBlock != null) {
+                mBlock.recycle();
+                mBlock = null;
+            }
+            mBuffer = null;
+            mOffset = 0;
+        }
+    }
+
+    public BlockModelDecoder() {
+        // empty
+    }
+
+    public void tearDown() {
+        mLinearInputBlock.recycle();
+
+    }
+
+    /**
+     * Decodes the given input buffer,
+     * provided valid list of buffer info and format are passed as inputs.
+     *
+     * @param inputBuffer     Decode the provided list of ByteBuffers
+     * @param inputBufferInfo List of buffer info corresponding to provided input buffers
+     * @param asyncMode       Will run on async implementation if true
+     * @param format          For creating the decoder if codec name is empty and configuring it
+     * @param codecName       Will create the decoder with codecName
+     * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+     *         DECODE_CREATE_ERROR for decoder not created
+     * @throws IOException if the codec cannot be created.
+     */
+    @Override
+    public int decode(@NonNull List<ByteBuffer> inputBuffer,
+        @NonNull List<MediaCodec.BufferInfo> inputBufferInfo, final boolean asyncMode,
+        @NonNull MediaFormat format, String codecName)
+        throws IOException, InterruptedException {
+        setExtraConfigureFlags(MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
+        return super.decode(inputBuffer, inputBufferInfo, asyncMode, format, codecName);
+    }
+
+    @Override
+    protected void onInputAvailable(int inputBufferId, MediaCodec mediaCodec) {
+        if (mNumInFramesProvided >= mNumInFramesRequired) {
+            mIndex = mInputBufferInfo.size() - 1;
+        }
+        MediaCodec.BufferInfo bufInfo = mInputBufferInfo.get(mIndex);
+        mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+        if (mLinearInputBlock.getOffset() + bufInfo.size > mLinearInputBlock.getBufferCapacity()) {
+            int requestSize = 8192;
+            requestSize = Math.max(bufInfo.size, requestSize);
+            mLinearInputBlock.allocateBlock(mediaCodec.getCanonicalName(), requestSize);
+        }
+        int codecFlags = 0;
+        if ((bufInfo.flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+            codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+        }
+        if ((bufInfo.flags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+            codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+        }
+        codecFlags |= mSawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0;
+        if (DEBUG) {
+            Log.v(TAG, "input: id: " + inputBufferId
+                    + " size: " + bufInfo.size
+                    + " pts: " + bufInfo.presentationTimeUs
+                    + " flags: " + codecFlags);
+        }
+        mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+        mNumInFramesProvided++;
+        mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
+        if (mSawInputEOS) {
+            Log.i(TAG, "Saw Input EOS");
+        }
+        mStats.addFrameSize(bufInfo.size);
+        MediaCodec.QueueRequest request = mCodec.getQueueRequest(inputBufferId);
+        request.setLinearBlock(mLinearInputBlock.getBlock(), mLinearInputBlock.getOffset(),
+                bufInfo.size);
+        request.setPresentationTimeUs(bufInfo.presentationTimeUs);
+        request.setFlags(codecFlags);
+        request.queue();
+        if (bufInfo.size > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+                | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+            mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+        }
+    }
+
+    @Override
+    protected void onOutputAvailable(
+            MediaCodec mediaCodec, int outputBufferId, MediaCodec.BufferInfo outputBufferInfo) {
+        if (mSawOutputEOS || outputBufferId < 0) {
+            return;
+        }
+        mNumOutputFrame++;
+        if (DEBUG) {
+            Log.d(TAG,
+                    "In OutputBufferAvailable ,"
+                            + " output frame number = " + mNumOutputFrame
+                            + " timestamp = " + outputBufferInfo.presentationTimeUs
+                            + " size = " + outputBufferInfo.size);
+        }
+        MediaCodec.OutputFrame outFrame = mediaCodec.getOutputFrame(outputBufferId);
+        ByteBuffer outputBuffer = null;
+        try {
+            if (outFrame.getLinearBlock() != null) {
+                outputBuffer = outFrame.getLinearBlock().map();
+            }
+        } catch (IllegalStateException e) {
+            // The output frame may not be backed by a linear block (e.g. it is hardware
+            // buffer backed); in that case outputBuffer stays null and is skipped below.
+        }
+        if (mOutputStream != null) {
+            try {
+                if (outputBuffer != null) {
+                    byte[] bytesOutput = new byte[outputBuffer.remaining()];
+                    outputBuffer.get(bytesOutput);
+                    mOutputStream.write(bytesOutput);
+                }
+            } catch (IOException e) {
+                e.printStackTrace();
+                Log.d(TAG, "Error Dumping File: Exception " + e.toString());
+            }
+        }
+        ByteBuffer copiedBuffer = null;
+        int bytesRemaining = 0;
+        if (outputBuffer != null) {
+            bytesRemaining = outputBuffer.remaining();
+            if (mIBufferSend != null) {
+                copiedBuffer = ByteBuffer.allocate(outputBuffer.remaining());
+                copiedBuffer.put(outputBuffer);
+            }
+            outFrame.getLinearBlock().recycle();
+            outputBuffer = null;
+        }
+        if (mFrameReleaseQueue != null) {
+            if (mMime.startsWith("audio/")) {
+                try {
+                    mFrameReleaseQueue.pushFrame(outputBufferId, bytesRemaining);
+                } catch (Exception e) {
+                    Log.d(TAG, "Error in getting MediaCodec buffer" + e.toString());
+                }
+            } else {
+                mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+                                                outputBufferInfo.presentationTimeUs);
+            }
+
+        } else if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            // TODO: may be inefficient;
+            info.buf = copiedBuffer;
+            info.idx = outputBufferId;
+            info.obj = mediaCodec;
+            info.bytesRead = outputBufferInfo.size;
+            info.presentationTimeUs = outputBufferInfo.presentationTimeUs;
+            info.flag = outputBufferInfo.flags;
+            mIBufferSend.sendBuffer(this, info);
+        } else {
+            mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+        }
+        mSawOutputEOS = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+        if (DEBUG && mSawOutputEOS) {
+            Log.i(TAG, "Saw output EOS");
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
index f223242..031817b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
@@ -78,4 +78,21 @@
         }
         return null;
     }
+    /**
+     * Returns compression ratio for a given mediaType.
+     * @param mediaType mime type for which compression ratio is to be returned.
+     */
+    public static float getCompressionRatio(String mediaType) {
+        switch (mediaType) {
+            case MediaFormat.MIMETYPE_AUDIO_FLAC:
+                return 0.7f;
+            case MediaFormat.MIMETYPE_AUDIO_G711_MLAW:
+            case MediaFormat.MIMETYPE_AUDIO_G711_ALAW:
+            case MediaFormat.MIMETYPE_AUDIO_MSGSM:
+                return 0.5f;
+            case MediaFormat.MIMETYPE_AUDIO_RAW:
+                return 1.0f;
+        }
+        return 0.1f;
+    }
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index e9b337d..2ea0ed2 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -18,6 +18,7 @@
 
 import android.view.Surface;
 
+import android.media.AudioFormat;
 import android.media.MediaCodec;
 import android.media.MediaCodec.BufferInfo;
 import android.media.MediaFormat;
@@ -42,6 +43,7 @@
 
     protected final Object mLock = new Object();
     protected MediaCodec mCodec;
+    protected int mExtraFlags = 0;
     protected Surface mSurface = null;
     protected boolean mRender = false;
     protected ArrayList<BufferInfo> mInputBufferInfo;
@@ -58,6 +60,8 @@
     protected int mNumOutputFrame;
     protected int mIndex;
 
+    protected boolean mUseFrameReleaseQueue = false;
+
     protected ArrayList<ByteBuffer> mInputBuffer;
     protected FileOutputStream mOutputStream;
     protected FrameReleaseQueue mFrameReleaseQueue = null;
@@ -85,6 +89,11 @@
         mIBufferSend = receiver;
         return true;
     }
+
+    public void setExtraConfigureFlags(int flags) {
+        this.mExtraFlags = flags;
+    }
+
     /**
      * Setup of decoder
      *
@@ -94,17 +103,32 @@
         mSignalledError = false;
         mOutputStream = outputStream;
     }
+
+    /*
+     * This can be used to setup audio decoding, simulating audio playback.
+     */
+    public void setupDecoder(
+            boolean render, boolean useFrameReleaseQueue, int numInFramesRequired) {
+        mRender = render;
+        mUseFrameReleaseQueue = useFrameReleaseQueue;
+        mNumInFramesRequired = numInFramesRequired;
+        mSignalledError = false;
+        setupDecoder(null);
+    }
+
     public void setupDecoder(Surface surface, boolean render,
             boolean useFrameReleaseQueue, int frameRate) {
         setupDecoder(surface, render, useFrameReleaseQueue, frameRate, -1);
     }
+
     public void setupDecoder(Surface surface, boolean render,
             boolean useFrameReleaseQueue, int frameRate, int numInFramesRequired) {
         mSignalledError = false;
         mOutputStream = null;
         mSurface = surface;
         mRender = render;
-        if (useFrameReleaseQueue) {
+        mUseFrameReleaseQueue = useFrameReleaseQueue;
+        if (mUseFrameReleaseQueue) {
             Log.i(TAG, "Using FrameReleaseQueue with frameRate " + frameRate);
             mFrameReleaseQueue = new FrameReleaseQueue(mRender, frameRate);
         }
@@ -166,6 +190,18 @@
         public void onOutputFormatChanged(
                 @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
             Log.i(TAG, "Output format changed. Format: " + format.toString());
+            if (mUseFrameReleaseQueue
+                    && mFrameReleaseQueue == null && mMime.startsWith("audio/")) {
+                // start a frame release thread for this configuration.
+                int bytesPerSample = AudioFormat.getBytesPerSample(
+                        format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+                                AudioFormat.ENCODING_PCM_16BIT));
+                int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+                int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+                mFrameReleaseQueue = new FrameReleaseQueue(
+                        mRender, sampleRate, channelCount, bytesPerSample);
+                mFrameReleaseQueue.setMediaCodec(mCodec);
+            }
         }
 
         @Override
@@ -223,11 +259,10 @@
         if (asyncMode) {
             setCallback(mCodec);
         }
-        int isEncoder = 0;
         if (DEBUG) {
             Log.d(TAG, "Media Format : " + format.toString());
         }
-        mCodec.configure(format, mSurface, null, isEncoder);
+        mCodec.configure(format, mSurface, null, mExtraFlags);
 
         mCodec.start();
         Log.i(TAG, "Codec started async mode ?  " + asyncMode);
@@ -395,8 +430,17 @@
             }
         }
         if (mFrameReleaseQueue != null) {
-            mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
-                                            outputBufferInfo.presentationTimeUs);
+            if (mMime.startsWith("audio/")) {
+                try {
+                    ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferId);
+                    mFrameReleaseQueue.pushFrame(outputBufferId, outputBuffer.remaining());
+                } catch (Exception e) {
+                    Log.d(TAG, "Error in getting MediaCodec buffer" + e.toString());
+                }
+            } else {
+                mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
+                                                outputBufferInfo.presentationTimeUs);
+            }
         } else if (mIBufferSend != null) {
             IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
             info.buf = mediaCodec.getOutputBuffer(outputBufferId);
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 20a2573..0861c2c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -29,45 +29,89 @@
 
 public class FrameReleaseQueue {
     private static final String TAG = "FrameReleaseQueue";
+    private static final boolean DEBUG = false;
     private final String MIME_AV1 = "video/av01";
     private final int AV1_SUPERFRAME_DELAY = 6;
     private final int THRESHOLD_TIME = 5;
 
+    private final long HOUR_IN_MS = (60 * 60 * 1000L);
+    private final long MINUTE_IN_MS = (60 * 1000L);
+
     private MediaCodec mCodec;
     private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
     private ReleaseThread mReleaseThread;
     private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
-    private boolean mReleaseJobStarted = false;
+    private AtomicBoolean mReleaseJobStarted = new AtomicBoolean(false);
     private boolean mRender = false;
-    private int mWaitTime = 40; // milliseconds per frame
+    private long mWaitTime = 40; // milliseconds per frame
     private int mWaitTimeCorrection = 0;
     private int mCorrectionLoopCount;
-    private int firstReleaseTime = -1;
-    private int mAllowedDelayTime = THRESHOLD_TIME;
+    protected long firstReleaseTime = -1;
+    private long mAllowedDelayTime = THRESHOLD_TIME;
     private int mFrameDelay = 0;
     private final ScheduledExecutorService mScheduler = Executors.newScheduledThreadPool(1);
 
+    public FrameReleaseQueue(boolean render, int frameRate) {
+        this.mFrameInfoQueue = new LinkedBlockingQueue();
+        this.mReleaseThread = new ReleaseThread();
+        this.doFrameRelease.set(true);
+        this.mRender = render;
+        this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
+        int waitTimeRemainder = 1000 % frameRate;
+        int gcd = gcd(frameRate, waitTimeRemainder);
+        this.mCorrectionLoopCount = frameRate / gcd;
+        this.mWaitTimeCorrection = waitTimeRemainder / gcd;
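+        // e.g. frameRate=30 gives mWaitTime=33 ms, with mWaitTimeCorrection=1 applied
+        // every mCorrectionLoopCount=3 loops, averaging ~33.33 ms per frame.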
+        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+    }
+
+    public FrameReleaseQueue(boolean render, int sampleRate, int nChannels, int bytesPerChannel) {
+        this.mFrameInfoQueue = new LinkedBlockingQueue();
+        this.doFrameRelease.set(true);
+        this.mRender = render;
+        this.mReleaseThread = new AudioRendererThread(sampleRate, nChannels, bytesPerChannel);
+    }
 
     private static class FrameInfo {
         private int number;
         private int bufferId;
         private int displayTime;
+        private int bytes;
         public FrameInfo(int frameNumber, int frameBufferId, int frameDisplayTime) {
             this.number = frameNumber;
             this.bufferId = frameBufferId;
             this.displayTime = frameDisplayTime;
         }
+        public FrameInfo(int frameBufferId, int bytes) {
+            this.bufferId = frameBufferId;
+            this.bytes = bytes;
+        }
     }
 
     private class ReleaseThread extends Thread {
         private int mLoopCount = 0;
-        private int mNextReleaseTime = 0;
+        private long mNextReleaseTime = 0;
+
+        protected void printPlaybackTime() {
+            if (firstReleaseTime == -1) {
+                Log.d(TAG, "Playback Time not initialized");
+                return;
+            }
+            long curTime = getCurSysTime() - firstReleaseTime;
+            long hours = curTime / (HOUR_IN_MS);
+            curTime -= (hours * HOUR_IN_MS);
+            long min = curTime / MINUTE_IN_MS;
+            curTime -= (min * MINUTE_IN_MS);
+            Log.d(TAG, "Playback time: "
+                    + hours + "h "
+                    + min + "m "
+                    + (curTime / 1000.0) + "s");
+        }
 
         @SuppressWarnings("FutureReturnValueIgnored")
         public void run() {
             /* Check if the release thread wakes up too late */
             if (mLoopCount != 0) {
-                int delta = getCurSysTime() - mNextReleaseTime;
+                long delta = getCurSysTime() - mNextReleaseTime;
                 if (delta >= THRESHOLD_TIME) {
                     Log.d(TAG, "Release thread wake up late by " + delta);
                     /* For accidental late wake up, we should relax the timestamp
@@ -93,8 +137,8 @@
                         popAndRelease(false);
                     } else {
                         mNextReleaseTime += mWaitTime;
-                        int curSysTime = getCurSysTime();
-                        int curMediaTime = curSysTime - firstReleaseTime;
+                        long curSysTime = getCurSysTime();
+                        long curMediaTime = curSysTime - firstReleaseTime;
                         while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
                                 curFrameInfo.displayTime <= curMediaTime) {
                             if (!((curMediaTime - curFrameInfo.displayTime) <= mAllowedDelayTime)) {
@@ -123,21 +167,86 @@
                     mNextReleaseTime += mWaitTimeCorrection;
                 }
                 mLoopCount += 1;
+            } else {
+                mReleaseJobStarted.set(false);
             }
         }
     }
 
-    public FrameReleaseQueue(boolean render, int frameRate) {
-        this.mFrameInfoQueue = new LinkedBlockingQueue();
-        this.mReleaseThread = new ReleaseThread();
-        this.doFrameRelease.set(true);
-        this.mRender = render;
-        this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
-        int waitTimeRemainder = 1000 % frameRate;
-        int gcd = gcd(frameRate, waitTimeRemainder);
-        this.mCorrectionLoopCount = frameRate / gcd;
-        this.mWaitTimeCorrection = waitTimeRemainder / gcd;
-        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
+    private class AudioRendererThread extends ReleaseThread {
+        private final int WAIT_FOR_BUFFER_IN_SEC = 2;
+        private double mTimeAdjustMs = 0;
+        private double mMsForByte = 0;
+        private double mExpectedWakeUpTime = 0;
+        private FrameInfo mCurrentFrameInfo;
+
+        AudioRendererThread(int sampleRate, int nChannels, int bytesPerChannel) {
+            if (DEBUG) {
+                Log.d(TAG, "sampleRate " + sampleRate
+                        + " nChannels " + nChannels
+                        + " bytesPerChannel " + bytesPerChannel);
+            }
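+            // e.g. 48000 Hz, 2 channels, 2 bytes per sample => 192000 bytes/s,
+            // i.e. roughly 0.0052 ms of audio per byte.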
+            this.mMsForByte = 1000 / (double)(sampleRate * nChannels * bytesPerChannel);
+        }
+
+        @Override
+        @SuppressWarnings("FutureReturnValueIgnored")
+        public void run() {
+            long curTime = getCurSysTime();
+            if (DEBUG) {
+                if (firstReleaseTime == -1) {
+                    firstReleaseTime = curTime;
+                }
+                printPlaybackTime();
+            }
+            if (mMsForByte == 0) {
+                Log.e(TAG, "Audio rendering not possible, no valid params");
+                return;
+            }
+            if (mCurrentFrameInfo != null) {
+                try {
+                    mCodec.releaseOutputBuffer(mCurrentFrameInfo.bufferId, mRender);
+                } catch (IllegalStateException e) {
+                    doFrameRelease.set(false);
+                    Log.e(TAG, "Threw IllegalStateException on releaseOutputBuffer");
+                } finally {
+                    mCurrentFrameInfo = null;
+                }
+            }
+            boolean requestedSchedule = false;
+            try {
+                while (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
+                    mCurrentFrameInfo = mFrameInfoQueue.poll(
+                            WAIT_FOR_BUFFER_IN_SEC, TimeUnit.SECONDS);
+                    if (mCurrentFrameInfo != null) {
+                        mTimeAdjustMs = 0;
+                        if (mExpectedWakeUpTime != 0) {
+                            mTimeAdjustMs = mExpectedWakeUpTime - getCurSysTime();
+                        }
+                        double sleepTimeUs =
+                                (mMsForByte * mCurrentFrameInfo.bytes + mTimeAdjustMs) * 1000;
+                        mExpectedWakeUpTime = getCurSysTime() + (sleepTimeUs / 1000);
+                        if (DEBUG) {
+                            Log.d(TAG, " mExpectedWakeUpTime " + mExpectedWakeUpTime
+                                + " Waiting for " + (long)(sleepTimeUs) + "us"
+                                + " Now " + getCurSysTime()
+                                + " bytes " + mCurrentFrameInfo.bytes
+                                + " bufferID " + mCurrentFrameInfo.bufferId);
+                        }
+                        mScheduler.schedule(
+                                mReleaseThread, (long) sleepTimeUs, TimeUnit.MICROSECONDS);
+                        requestedSchedule = true;
+                        break;
+                    }
+                }
+            } catch (InterruptedException e) {
+                Log.d(TAG, "Interrupted during poll wait");
+                doFrameRelease.set(false);
+            }
+            if (!requestedSchedule) {
+                mReleaseJobStarted.set(false);
+            }
+        }
     }
 
     private static int gcd(int a, int b) {
@@ -154,6 +263,19 @@
         }
     }
 
+    public boolean pushFrame(int frameBufferId, int bytes) {
+        FrameInfo info = new FrameInfo(frameBufferId, bytes);
+        boolean pushSuccess = mFrameInfoQueue.offer(info);
+        if (!pushSuccess) {
+            Log.e(TAG, "Failed to push frame with buffer id " + info.bufferId);
+            return false;
+        }
+        if (!mReleaseJobStarted.get()) {
+            mScheduler.execute(mReleaseThread);
+            mReleaseJobStarted.set(true);
+        }
+        return true;
+    }
     public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
         int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
         FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
@@ -163,16 +285,16 @@
             return false;
         }
 
-        if (!mReleaseJobStarted && frameNumber >= mFrameDelay) {
+        if (!mReleaseJobStarted.get() && frameNumber >= mFrameDelay) {
             mScheduler.execute(mReleaseThread);
-            mReleaseJobStarted = true;
+            mReleaseJobStarted.set(true);
             Log.i(TAG, "Started frame release thread");
         }
         return true;
     }
 
-    private int getCurSysTime() {
-        return (int)(System.nanoTime()/1000000);
+    private long getCurSysTime() {
+        return (long)(System.nanoTime() / 1000000L);
     }
 
     @SuppressWarnings("FutureReturnValueIgnored")
@@ -196,7 +318,7 @@
 
     public void stopFrameRelease() {
         doFrameRelease.set(false);
-        while (mFrameInfoQueue.size() > 0) {
+        while (mReleaseJobStarted.get()) {
             try {
                 TimeUnit.SECONDS.sleep(1);
             } catch (InterruptedException e) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
index c97a35c..bbc3d48 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
@@ -16,6 +16,8 @@
 
 package com.android.media.benchmark.library;
 import android.media.MediaCodec;
+
+import java.util.ArrayDeque;
 import java.nio.ByteBuffer;
 /**
  * interfaces that can be used to implement
@@ -26,10 +28,11 @@
       public ByteBuffer buf;
       public int idx;
       public Object obj;
-      int flag;
-      int bytesRead;
-      boolean isComplete = true;
-      long presentationTimeUs;
+      public ArrayDeque<MediaCodec.BufferInfo> infos;
+      public int flag;
+      public int bytesRead;
+      public boolean isComplete = true;
+      public long presentationTimeUs;
   }
 
   public interface IReceiveBuffer {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
index 3e6cee1..c68ac8a 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
@@ -96,10 +96,10 @@
                       pBuf.info.buf.remaining() +" C:" + cBuf.info.buf.remaining());
               }
           }
+          cBuf.info.infos = pBuf.info.infos;
           cBuf.info.bytesRead = bytesRead;
           cBuf.info.presentationTimeUs = pBuf.info.presentationTimeUs;
           cBuf.info.flag = pBuf.info.flag;
-
           if (pBuf.rIface != null) {
               pBuf.rIface.receiveBuffer(pBuf.info);
           }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
new file mode 100644
index 0000000..ed2defe
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitBlockModelDecoder.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.benchmark.library;
+
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import java.util.ArrayDeque;
+import java.util.Iterator;
+import java.util.List;
+
+import com.android.media.benchmark.library.CodecUtils;
+import com.android.media.benchmark.library.BlockModelDecoder;
+
+public class MultiAccessUnitBlockModelDecoder extends BlockModelDecoder {
+    private static final String TAG = MultiAccessUnitBlockModelDecoder.class.getSimpleName();
+    private final ArrayDeque<MediaCodec.BufferInfo> mInputInfos = new ArrayDeque<>();
+    private final boolean DEBUG = false;
+    protected int mMaxInputSize = 0;
+
+    public MultiAccessUnitBlockModelDecoder() {
+        // empty
+    }
+
+    /**
+     * Decodes the given input buffer,
+     * provided valid list of buffer info and format are passed as inputs.
+     *
+     * @param inputBuffer     Decode the provided list of ByteBuffers
+     * @param inputBufferInfo List of buffer info corresponding to provided input buffers
+     * @param asyncMode       Will run on async implementation if true
+     * @param format          For creating the decoder if codec name is empty and configuring it
+     * @param codecName       Will create the decoder with codecName
+     * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+     *         DECODE_CREATE_ERROR for decoder not created
+     * @throws IOException if the codec cannot be created.
+     */
+    @Override
+    public int decode(@NonNull List<ByteBuffer> inputBuffer,
+        @NonNull List<MediaCodec.BufferInfo> inputBufferInfo, final boolean asyncMode,
+        @NonNull MediaFormat format, String codecName)
+        throws IOException, InterruptedException {
+        setExtraConfigureFlags(MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
+        configureMaxInputSize(format);
+        return super.decode(inputBuffer, inputBufferInfo, asyncMode, format, codecName);
+    }
+
+    protected void configureMaxInputSize(MediaFormat format) {
+        final String mime = format.getString(MediaFormat.KEY_MIME);
+        final int maxOutputSize = format.getNumber(
+            MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+        final int maxInputSizeInBytes = format.getInteger(
+                MediaFormat.KEY_MAX_INPUT_SIZE);
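+        // e.g. a 300000-byte batch output cap for FLAC (ratio 0.7f in CodecUtils)
+        // implies roughly 210000 bytes of compressed input per linear block.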
+        mMaxInputSize = Math.max(maxInputSizeInBytes,
+                (int) (maxOutputSize * CodecUtils.getCompressionRatio(mime)));
+    }
+
+    @Override
+    public void setCallback(MediaCodec codec) {
+        mCodec.setCallback(new MediaCodec.Callback() {
+            boolean isUsingLargeFrameMode = false;
+
+            @Override
+            public void onInputBufferAvailable(
+                    @NonNull MediaCodec mediaCodec, int inputBufferId) {
+                try {
+                    mStats.addInputTime();
+                    if (isUsingLargeFrameMode) {
+                        onInputsAvailable(inputBufferId, mediaCodec);
+                    } else {
+                        onInputAvailable(inputBufferId, mediaCodec);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    Log.e(TAG, e.toString());
+                }
+            }
+
+            @Override
+            public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec,
+                    int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
+                mStats.addOutputTime();
+                onOutputAvailable(mediaCodec, outputBufferId, bufferInfo);
+                if (mSawOutputEOS) {
+                    synchronized (mLock) { mLock.notify(); }
+                }
+            }
+
+            @Override
+            public void onOutputBuffersAvailable(
+                    @NonNull MediaCodec mediaCodec,
+                            int outputBufferId, @NonNull ArrayDeque<MediaCodec.BufferInfo> infos) {
+                int i = 0;
+                while (i++ < infos.size()) {
+                    mStats.addOutputTime();
+                }
+                onOutputsAvailable(mediaCodec, outputBufferId, infos);
+                if (mSawOutputEOS) {
+                    synchronized (mLock) { mLock.notify(); }
+                }
+            }
+
+            @Override
+            public void onOutputFormatChanged(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
+                Log.i(TAG, "Output format changed. Format: " + format.toString());
+                final int maxOutputSize = format.getNumber(
+                            MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+                isUsingLargeFrameMode = (maxOutputSize > 0);
+                configureMaxInputSize(format);
+                if (mUseFrameReleaseQueue && mFrameReleaseQueue == null) {
+                    int bytesPerSample = AudioFormat.getBytesPerSample(
+                            format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+                                    AudioFormat.ENCODING_PCM_16BIT));
+                    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+                    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+                    mFrameReleaseQueue = new FrameReleaseQueue(
+                            mRender, sampleRate, channelCount, bytesPerSample);
+                    mFrameReleaseQueue.setMediaCodec(mCodec);
+                }
+            }
+
+            @Override
+            public void onError(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+                mSignalledError = true;
+                Log.e(TAG, "Codec Error: " + e.toString());
+                e.printStackTrace();
+                synchronized (mLock) { mLock.notify(); }
+            }
+        });
+
+    }
+
+    protected void onInputsAvailable(int inputBufferId, MediaCodec mediaCodec) {
+        if (inputBufferId >= 0) {
+            mLinearInputBlock.allocateBlock(mediaCodec.getCanonicalName(), mMaxInputSize);
+            MediaCodec.BufferInfo bufInfo;
+            mInputInfos.clear();
+            int offset = 0;
+            while (mNumInFramesProvided < mNumInFramesRequired) {
+                bufInfo = mInputBufferInfo.get(mIndex);
+                mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+                int bufferSizeNeeded = mLinearInputBlock.getOffset() + bufInfo.size;
+                if (bufferSizeNeeded > mLinearInputBlock.getBufferCapacity()) {
+                    break;
+                }
+                mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+                mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+                bufInfo.offset = offset; offset += bufInfo.size;
+                mInputInfos.add(bufInfo);
+                mNumInFramesProvided++;
+                mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
+
+            }
+            if (DEBUG) {
+                Log.d(TAG, "inputsAvailable ID : " + inputBufferId
+                        + " queued info size: " + mInputInfos.size()
+                        + " Total queued size: " + offset);
+            }
+            if (mNumInFramesProvided >= mNumInFramesRequired) {
+                mIndex = mInputBufferInfo.size() - 1;
+                bufInfo = mInputBufferInfo.get(mIndex);
+                int bufferSizeNeeded = mLinearInputBlock.getOffset() + bufInfo.size;
+                if (bufferSizeNeeded <= mLinearInputBlock.getBufferCapacity()) {
+                    if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
+                        Log.e(TAG, "Error in EOS flag for Decoder");
+                    }
+                    mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+                    mLinearInputBlock.getBuffer().put(mInputBuffer.get(mIndex).array());
+                    mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufInfo.size);
+                    bufInfo.offset = offset;
+                    offset += bufInfo.size;
+                    //bufInfo.flags = codecFlags;
+                    mInputInfos.add(bufInfo);
+                    mNumInFramesProvided++;
+                }
+            }
+            if (mInputInfos.size() == 0) {
+                Log.d(TAG, " No inputs to queue");
+            } else {
+                mStats.addFrameSize(offset);
+                MediaCodec.QueueRequest request = mediaCodec.getQueueRequest(inputBufferId);
+                request.setMultiFrameLinearBlock(mLinearInputBlock.getBlock(), mInputInfos);
+                request.queue();
+            }
+        }
+    }
+
+    protected void onOutputsAvailable(MediaCodec mediaCodec, int outputBufferId,
+            ArrayDeque<MediaCodec.BufferInfo> infos) {
+        if (mSawOutputEOS || outputBufferId < 0) {
+            return;
+        }
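+        // Map the output frame's linear block (if any) so the decoded bytes can be
+        // dumped to the output stream.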
+        MediaCodec.OutputFrame outFrame = mediaCodec.getOutputFrame(outputBufferId);
+        ByteBuffer outputBuffer = null;
+        try {
+            if (outFrame.getLinearBlock() != null) {
+                outputBuffer = outFrame.getLinearBlock().map();
+            }
+        } catch (IllegalStateException e) {
+            // The buffer may not be linear; that is fine, the dump below simply
+            // skips non-linear output.
+        }
+        if (mOutputStream != null) {
+            try {
+                if (outputBuffer != null) {
+                    byte[] bytesOutput = new byte[outputBuffer.remaining()];
+                    outputBuffer.get(bytesOutput);
+                    mOutputStream.write(bytesOutput);
+                    if (DEBUG) {
+                        Log.d(TAG, "Received outputs buffer size : " + bytesOutput.length
+                                + " infos size " + infos.size());
+                    }
+                }
+            } catch (IOException e) {
+                e.printStackTrace();
+                Log.d(TAG, "Error Dumping File: Exception " + e.toString());
+            }
+        }
+        mNumOutputFrame += infos.size();
+        MediaCodec.BufferInfo last = infos.peekLast();
+        if (last != null) {
+            mSawOutputEOS |= ((last.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0);
+        }
+        int bytesRemaining = 0;
+        if (outputBuffer != null) {
+            bytesRemaining = outputBuffer.remaining();
+            outFrame.getLinearBlock().recycle();
+            outputBuffer = null;
+        }
+        if (mFrameReleaseQueue != null) {
+            mFrameReleaseQueue.pushFrame(outputBufferId, bytesRemaining);
+        } else if (mIBufferSend == null) {
+            mediaCodec.releaseOutputBuffer(outputBufferId, mRender);
+        }
+        if (mSawOutputEOS) {
+            Log.i(TAG, "Large frame - saw output EOS");
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
index cb92f06..fd8859b 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
@@ -18,6 +18,7 @@
 
 import android.view.Surface;
 
+import android.media.AudioFormat;
 import android.media.MediaCodec;
 import android.media.MediaCodec.BufferInfo;
 import android.media.MediaFormat;
@@ -91,8 +92,18 @@
                     @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
                 Log.i(TAG, "Output format changed. Format: " + format.toString());
                 final int maxOutputSize = format.getNumber(
-                        MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+                            MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
                 isUsingLargeFrameMode = (maxOutputSize > 0);
+                if (mUseFrameReleaseQueue && mFrameReleaseQueue == null) {
+                    int bytesPerSample = AudioFormat.getBytesPerSample(
+                            format.getInteger(MediaFormat.KEY_PCM_ENCODING,
+                                    AudioFormat.ENCODING_PCM_16BIT));
+                    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+                    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+                    mFrameReleaseQueue = new FrameReleaseQueue(
+                            mRender, sampleRate, channelCount, bytesPerSample);
+                    mFrameReleaseQueue.setMediaCodec(mCodec);
+                }
             }
 
             @Override
@@ -177,30 +188,6 @@
         if (mSawOutputEOS || outputBufferId < 0) {
             return;
         }
-        Iterator<BufferInfo> iter = infos.iterator();
-        while (iter.hasNext()) {
-            BufferInfo bufferInfo = iter.next();
-            mNumOutputFrame++;
-            if (DEBUG) {
-                Log.d(TAG,
-                        "In OutputBufferAvailable ,"
-                                + " output frame number = " + mNumOutputFrame
-                                + " timestamp = " + bufferInfo.presentationTimeUs
-                                + " size = " + bufferInfo.size);
-            }
-            if (mIBufferSend != null) {
-                IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
-                info.buf = mc.getOutputBuffer(outputBufferId);
-                info.idx = outputBufferId;
-                info.obj = mc;
-                info.bytesRead = bufferInfo.size;
-                info.presentationTimeUs = bufferInfo.presentationTimeUs;
-                info.flag = bufferInfo.flags;
-                info.isComplete = iter.hasNext() ? false : true;
-                mIBufferSend.sendBuffer(this, info);
-            }
-            mSawOutputEOS |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
-        }
         if (mOutputStream != null) {
             try {
                 ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
@@ -212,12 +199,27 @@
                 Log.d(TAG, "Error Dumping File: Exception " + e.toString());
             }
         }
-        if (mIBufferSend == null) {
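+        // With large-frame output, one buffer carries several access units; the EOS
+        // flag, if present, is reported on the last BufferInfo of the batch.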
+        mNumOutputFrame += infos.size();
+        MediaCodec.BufferInfo last = infos.peekLast();
+        if (last != null) {
+            mSawOutputEOS |= ((last.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0);
+        }
+        if (mIBufferSend != null) {
+            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+            info.buf = mc.getOutputBuffer(outputBufferId);
+            info.idx = outputBufferId;
+            info.obj = mc;
+            info.infos = infos;
+            mIBufferSend.sendBuffer(this, info);
+        } else if (mFrameReleaseQueue != null) {
+            ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
+            mFrameReleaseQueue.pushFrame(
+                    outputBufferId, outputBuffer.remaining());
+        } else {
             mc.releaseOutputBuffer(outputBufferId, mRender);
         }
         if (mSawOutputEOS) {
             Log.i(TAG, "Large frame - saw output EOS");
         }
-        // we don't support frame release queue for large audio frame
     }
 }
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index e13f8f7..679b111 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "ServiceUtilities"
 
 #include <audio_utils/clock.h>
+#include <android-base/properties.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
@@ -28,9 +29,11 @@
 #include <media/AidlConversionUtil.h>
 #include <android/content/AttributionSourceState.h>
 
-#include <iterator>
 #include <algorithm>
+#include <iterator>
+#include <mutex>
 #include <pwd.h>
+#include <thread>
 
 /* When performing permission checks we do not use permission cache for
  * runtime permissions (protection level dangerous) as they may change at
@@ -396,6 +399,106 @@
     return NO_ERROR;
 }
 
+// TODO(b/285588444), clean this up on main, but soak it for backporting purposes for now
+namespace {
+class BluetoothPermissionCache {
+    static constexpr auto SYSPROP_NAME = "cache_key.system_server.package_info";
+    const String16 BLUETOOTH_PERM {"android.permission.BLUETOOTH_CONNECT"};
+    mutable std::mutex mLock;
+    // Cached property is conditionally defined, since it is only available on bionic.
+    // On host, the cache is never invalidated.
+#if defined(__BIONIC__)
+    // Unlocked, but only accessed from mListenerThread
+    base::CachedProperty mCachedProperty;
+#endif
+    // This thread is designed to never join/terminate, so it is fine that it is never signalled
+    const std::thread mListenerThread;
+    GUARDED_BY(mLock)
+    std::string mPropValue;
+    GUARDED_BY(mLock)
+    std::unordered_map<uid_t, bool> mCache;
+    PermissionController mPc{};
+public:
+    BluetoothPermissionCache()
+#if defined(__BIONIC__)
+            : mCachedProperty{SYSPROP_NAME},
+            mListenerThread([this]() mutable {
+                    while (true) {
+                        std::string newVal = mCachedProperty.WaitForChange() ?: "";
+                        std::lock_guard l{mLock};
+                        if (newVal != mPropValue) {
+                            ALOGV("Bluetooth permission update");
+                            mPropValue = newVal;
+                            mCache.clear();
+                        }
+                    }
+                })
+#endif
+            {}
+
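+    // Returns the cached BLUETOOTH_CONNECT result for this uid; on a miss, queries
+    // PermissionController and caches the result until the property listener clears the cache.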
+    bool checkPermission(uid_t uid, pid_t pid) {
+        std::lock_guard l{mLock};
+        auto it = mCache.find(uid);
+        if (it == mCache.end()) {
+            it = mCache.insert({uid, mPc.checkPermission(BLUETOOTH_PERM, pid, uid)}).first;
+        }
+        return it->second;
+    }
+};
+
+// Don't call this while holding a lock, since it may call into system server!
+// Filter out non-app UIDs before calling this method!
+bool checkBluetoothPermission(const AttributionSourceState& attr) {
+    [[clang::no_destroy]] static BluetoothPermissionCache impl{};
+    return impl.checkPermission(attr.uid, attr.pid);
+}
+} // anonymous namespace
+
+/**
+ * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
+ * a native audio service (audio flinger, audio policy) must be anonymized.
+ * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
+ * are not anonymized.
+ *
+ * @param attributionSource The attribution source of the calling app.
+ * @param caller string identifying the caller for logging.
+ * @return true if the MAC addresses must be anonymized, false otherwise.
+ */
+bool mustAnonymizeBluetoothAddress(
+        const AttributionSourceState& attributionSource, const String16&) {
+    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
+    bool res;
+    switch (multiuser_get_app_id(uid)) {
+        case AID_ROOT:
+        case AID_SYSTEM:
+        case AID_RADIO:
+        case AID_BLUETOOTH:
+        case AID_MEDIA:
+        case AID_AUDIOSERVER:
+            // Don't anonymize for privileged clients
+            res = false;
+            break;
+        default:
+            res = !checkBluetoothPermission(attributionSource);
+            break;
+    }
+    ALOGV("%s uid: %d, result: %d", __func__, uid, res);
+    return res;
+}
+
+/**
+ * Modifies the passed MAC address string in place for consumption by unprivileged clients.
+ * The string is assumed to have a valid MAC address format.
+ * The anonymization must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java.
+ *
+ * @param address input/output char string containing the MAC address to anonymize.
+ */
+void anonymizeBluetoothAddress(char *address) {
+    if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
+        return;
+    }
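+    // Overwrite the first four octets, e.g. "AA:BB:CC:DD:EE:FF" becomes "XX:XX:XX:XX:EE:FF".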
+    memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
+}
+
 sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
     const sp<IServiceManager> sm = defaultServiceManager();
     if (sm == nullptr) {
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 2631469..461e190 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -114,6 +114,10 @@
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
 bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
+bool mustAnonymizeBluetoothAddress(
+        const AttributionSourceState& attributionSource, const String16& caller);
+void anonymizeBluetoothAddress(char *address);
+
 int32_t getOpForSource(audio_source_t source);
 
 AttributionSourceState getCallingAttributionSource();
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 8b3bdcf..a1a0634 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -629,6 +629,7 @@
         bool isSpatialized;
         bool isBitPerfect;
         float volume;
+        bool muted;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, adjAttributionSource,
@@ -637,7 +638,8 @@
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
                                             deviceId, &portId, &secondaryOutputs, &isSpatialized,
                                             &isBitPerfect,
-                                            &volume);
+                                            &volume,
+                                            &muted);
         if (ret != NO_ERROR) {
             config->sample_rate = fullConfig.sample_rate;
             config->channel_mask = fullConfig.channel_mask;
@@ -905,6 +907,26 @@
         // Unknown arg silently ignored
     }
 
+    {
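+        // Report the service start time, startup completion time (if recorded) and the current time.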
+        std::string res;
+        res.reserve(100);
+        res += "Start begin: ";
+        const auto startTimeStr = audio_utils_time_string_from_ns(mStartTime);
+        res += startTimeStr.time;
+        const auto startFinishedTime = getStartupFinishedTime();
+        if (startFinishedTime != 0) {
+            res += "\nStart end:   ";
+            const auto startEndStr = audio_utils_time_string_from_ns(startFinishedTime);
+            res += startEndStr.time;
+        } else {
+            res += "\nStartup not yet finished!";
+        }
+        const auto nowTimeStr = audio_utils_time_string_from_ns(audio_utils_get_real_time_ns());
+        res += "\nNow:         ";
+        res += nowTimeStr.time;
+        res += "\n";
+        writeStr(fd, res);
+    }
     // get state of hardware lock
     {
         FallibleLockGuard l{hardwareMutex()};
@@ -1009,14 +1031,14 @@
     return NO_ERROR;
 }
 
-sp<Client> AudioFlinger::registerPid(pid_t pid)
+sp<Client> AudioFlinger::registerClient(pid_t pid, uid_t uid)
 {
     audio_utils::lock_guard _cl(clientMutex());
     // If pid is already in the mClients wp<> map, then use that entry
     // (for which promote() is always != 0), otherwise create a new entry and Client.
     sp<Client> client = mClients.valueFor(pid).promote();
     if (client == 0) {
-        client = sp<Client>::make(sp<IAfClientCallback>::fromExisting(this), pid);
+        client = sp<Client>::make(sp<IAfClientCallback>::fromExisting(this), pid, uid);
         mClients.add(pid, client);
     }
 
@@ -1097,6 +1119,7 @@
     bool isSpatialized = false;
     bool isBitPerfect = false;
     float volume;
+    bool muted;
 
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
@@ -1157,7 +1180,7 @@
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs,
-                                            &isSpatialized, &isBitPerfect, &volume);
+                                            &isSpatialized, &isBitPerfect, &volume, &muted);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1194,7 +1217,7 @@
             goto Exit;
         }
 
-        client = registerPid(adjAttributionSource.pid);
+        client = registerClient(adjAttributionSource.pid, adjAttributionSource.uid);
 
         IAfPlaybackThread* effectThread = nullptr;
         sp<IAfEffectChain> effectChain = nullptr;
@@ -1214,7 +1237,7 @@
         if (effectThread == nullptr) {
             effectChain = getOrphanEffectChain_l(sessionId);
         }
-        ALOGV("createTrack() sessionId: %d volume: %f", sessionId, volume);
+        ALOGV("createTrack() sessionId: %d volume: %f muted %d", sessionId, volume, muted);
 
         output.sampleRate = input.config.sample_rate;
         output.frameCount = input.frameCount;
@@ -1229,7 +1252,7 @@
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback, isSpatialized,
-                                      isBitPerfect, &output.afTrackFlags, volume);
+                                      isBitPerfect, &output.afTrackFlags, volume, muted);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -1653,7 +1676,7 @@
 }
 
 status_t AudioFlinger::setStreamVolume(audio_stream_type_t stream, float value,
-        audio_io_handle_t output)
+        bool muted, audio_io_handle_t output)
 {
     // check calling permissions
     if (!settingsAllowed()) {
@@ -1675,14 +1698,14 @@
     if (volumeInterface == NULL) {
         return BAD_VALUE;
     }
-    volumeInterface->setStreamVolume(stream, value);
+    volumeInterface->setStreamVolume(stream, value, muted);
 
     return NO_ERROR;
 }
 
 status_t AudioFlinger::setPortsVolume(
-        const std::vector<audio_port_handle_t>& ports, float volume, audio_io_handle_t output)
-{
+        const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+        audio_io_handle_t output) {
     for (const auto& port : ports) {
         if (port == AUDIO_PORT_HANDLE_NONE) {
             return BAD_VALUE;
@@ -1697,12 +1720,12 @@
     audio_utils::lock_guard lock(mutex());
     IAfPlaybackThread *thread = checkPlaybackThread_l(output);
     if (thread != nullptr) {
-        return thread->setPortsVolume(ports, volume);
+        return thread->setPortsVolume(ports, volume, muted);
     }
     const sp<IAfMmapThread> mmapThread = checkMmapThread_l(output);
     if (mmapThread != nullptr && mmapThread->isOutput()) {
         IAfMmapPlaybackThread *mmapPlaybackThread = mmapThread->asIAfMmapPlaybackThread().get();
-        return mmapPlaybackThread->setPortsVolume(ports, volume);
+        return mmapPlaybackThread->setPortsVolume(ports, volume, muted);
     }
     return BAD_VALUE;
 }
@@ -2489,7 +2512,7 @@
     output.selectedDeviceId = input.selectedDeviceId;
     output.flags = input.flags;
 
-    client = registerPid(VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid)));
+    client = registerClient(adjAttributionSource.pid, adjAttributionSource.uid);
 
     // Not a conventional loop, but a retry loop for at most two iterations total.
     // Try first maybe with FAST flag then try again without FAST flag if that fails.
@@ -4097,7 +4120,8 @@
                                                        0ns /* timeout */,
                                                        frameCountToBeReady,
                                                        track->getSpeed(),
-                                                       1.f /* volume */);
+                                                       1.f /* volume */,
+                                                       false /* muted */);
         status = patchTrack->initCheck();
         if (status != NO_ERROR) {
             ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -4412,7 +4436,7 @@
         audio_utils::lock_guard _l(mutex());
 
         if (sessionId == AUDIO_SESSION_DEVICE) {
-            sp<Client> client = registerPid(currentPid);
+            sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
             ALOGV("%s device type %#x address %s", __func__, device.mType, device.getAddress());
             handle = mDeviceEffectManager->createEffect_l(
                     &descOut, device, client, effectClient, mPatchPanel->patches_l(),
@@ -4474,7 +4498,7 @@
                     goto Exit;
                 }
                 ALOGV("%s() got io %d for effect %s", __func__, io, descOut.name);
-                sp<Client> client = registerPid(currentPid);
+                sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
                 bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
                 handle = createOrphanEffect_l(client, effectClient, priority, sessionId,
                                               &descOut, &enabledOut, &lStatus, pinned,
@@ -4536,7 +4560,7 @@
             }
         }
 
-        sp<Client> client = registerPid(currentPid);
+        sp<Client> client = registerClient(currentPid, adjAttributionSource.uid);
 
         // create effect on selected output thread
         bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 144e509..042194f 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -27,6 +27,7 @@
 #include "IAfTrack.h"
 #include "MelReporter.h"
 #include "PatchCommandThread.h"
+#include "audio_utils/clock.h"
 
 // External classes
 #include <audio_utils/mutex.h>
@@ -65,6 +66,11 @@
 
     status_t resetReferencesForTest();
 
+    // Called by main when startup finished -- for logging purposes only
+    void startupFinished() {
+        mStartupFinishedTime.store(audio_utils_get_real_time_ns(), std::memory_order_release);
+    }
+
 private:
 
     // ---- begin IAudioFlinger interface
@@ -93,12 +99,12 @@
     status_t getMasterBalance(float* balance) const final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setStreamVolume(audio_stream_type_t stream, float value,
-            audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+            bool muted, audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) final
             EXCLUDES_AudioFlinger_Mutex;
 
     status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
-            audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
+                            bool muted, audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -420,6 +426,10 @@
 
     sp<EffectsFactoryHalInterface> getEffectsFactory();
 
+    int64_t getStartupFinishedTime() {
+        return mStartupFinishedTime.load(std::memory_order_acquire);
+    }
+
 public:
     // TODO(b/292281786): Remove this when Oboeservice can get access to
     // openMmapStream through an IAudioFlinger handle directly.
@@ -728,7 +738,8 @@
                 // Audio data transfer is directly handled by the client creating the MMAP stream
     DefaultKeyedVector<audio_io_handle_t, sp<IAfMmapThread>> mMmapThreads GUARDED_BY(mutex());
 
-    sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
+    // always returns non-null
+    sp<Client> registerClient(pid_t pid, uid_t uid) EXCLUDES_AudioFlinger_ClientMutex;
 
     sp<IAfEffectHandle> createOrphanEffect_l(const sp<Client>& client,
                                           const sp<media::IEffectClient>& effectClient,
@@ -802,6 +813,10 @@
 
     // Local interface to AudioPolicyService, late inited, but logically const
     mediautils::atomic_sp<media::IAudioPolicyServiceLocal> mAudioPolicyServiceLocal;
+
+    const int64_t mStartTime = audio_utils_get_real_time_ns();
+    // Late-inited from main()
+    std::atomic<int64_t> mStartupFinishedTime {};
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/audioflinger/Client.cpp b/services/audioflinger/Client.cpp
index 93599ac..4858469 100644
--- a/services/audioflinger/Client.cpp
+++ b/services/audioflinger/Client.cpp
@@ -18,9 +18,10 @@
 
 namespace android {
 
-Client::Client(const sp<IAfClientCallback>& afClientCallback, pid_t pid)
+Client::Client(const sp<IAfClientCallback>& afClientCallback, pid_t pid, uid_t uid)
     : mAfClientCallback(afClientCallback)
     , mPid(pid)
+    , mUid(uid)
     , mClientAllocator(AllocatorFactory::getClientAllocator()) {}
 
 // Client destructor must be called with AudioFlinger::mClientLock held
@@ -34,4 +35,4 @@
     return mClientAllocator;
 }
 
-}   // namespace android
\ No newline at end of file
+}   // namespace android
diff --git a/services/audioflinger/Client.h b/services/audioflinger/Client.h
index ff0d751..c2fef39 100644
--- a/services/audioflinger/Client.h
+++ b/services/audioflinger/Client.h
@@ -42,13 +42,14 @@
 
 class Client : public RefBase {
 public:
-    Client(const sp<IAfClientCallback>& audioFlinger, pid_t pid);
+    Client(const sp<IAfClientCallback>& audioFlinger, pid_t pid, uid_t uid);
 
     // TODO(b/289139675) make Client container.
     // Client destructor must be called with AudioFlinger::mClientLock held
     ~Client() override;
     AllocatorFactory::ClientAllocator& allocator();
     pid_t pid() const { return mPid; }
+    uid_t uid() const { return mUid; }
     const auto& afClientCallback() const { return mAfClientCallback; }
 
 private:
@@ -56,6 +57,7 @@
 
     const sp<IAfClientCallback> mAfClientCallback;
     const pid_t mPid;
+    const uid_t mUid;
     AllocatorFactory::ClientAllocator mClientAllocator;
 };
 
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index abb8f2f..a13819c 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -481,7 +481,8 @@
             bool isSpatialized,
             bool isBitPerfect,
             audio_output_flags_t* afTrackFlags,
-            float volume)
+            float volume,
+            bool muted)
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
@@ -558,8 +559,8 @@
     virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
             EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
-            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &portIds, float volume,
+                                    bool muted) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
@@ -700,8 +701,8 @@
 
     virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
 
-    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
-            EXCLUDES_ThreadBase_Mutex = 0;
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+                                    bool muted) EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index ee834d6..1b10f81 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -291,7 +291,18 @@
             float speed = 1.0f,
             bool isSpatialized = false,
             bool isBitPerfect = false,
-            float volume = 0.0f);
+            float volume = 0.0f,
+            bool muted = false);
+
+    static constexpr std::string_view getLogHeader() {
+        using namespace std::literals;
+        return "Type     Id Active Client(pid/uid) Session Port Id S  Flags "
+                        "  Format Chn mask  SRate "
+                        "ST Usg CT "
+                        " G db  L dB  R dB  VS dB  PortVol dB  PortMuted"
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
+                        "   Latency\n"sv;
+    }
 
     virtual void pause() = 0;
     virtual void flush() = 0;
@@ -466,7 +477,14 @@
             const android::content::AttributionSourceState& attributionSource,
             pid_t creatorPid,
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
-            float volume = 0.0f);
+            float volume = 0.0f,
+            bool muted = false);
+
+    static constexpr std::string_view getLogHeader() {
+        using namespace std::literals;
+        return "Client(pid/uid) Session Port Id"
+                "   Format Chn mask  SRate Flags Usg/Src PortVol dB PortMuted\n"sv;
+    }
 
     // protected by MMapThread::mLock
     virtual void setSilenced_l(bool silenced) = 0;
@@ -511,6 +529,13 @@
             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
             int32_t startFrames = -1);
 
+    static constexpr std::string_view getLogHeader() {
+        using namespace std::literals;
+        return "Active     Id Client(pid/uid) Session Port Id  S  Flags  "
+                        " Format Chn mask  SRate Source  "
+                        " Server FrmCnt FrmRdy Sil   Latency\n"sv;
+    }
+
     // clear the buffer overflow flag
     virtual void clearOverflow() = 0;
     // set the buffer overflow flag and return previous value
@@ -587,7 +612,8 @@
                                              *  the lowest possible latency
                                              *  even if it might glitch. */
             float speed = 1.0f,
-            float volume = 1.0f);
+            float volume = 1.0f,
+            bool muted = false);
 };
 
 class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 8758bd0..30bbd5d 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -36,7 +36,8 @@
                             const android::content::AttributionSourceState& attributionSource,
                             pid_t creatorPid,
                             audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
-                            float volume = 0.0f);
+                            float volume = 0.0f,
+                            bool muted = false);
     ~MmapTrack() override;
 
     status_t initCheck() const final;
@@ -71,7 +72,11 @@
     void setPortVolume(float volume) override {
         mVolume = volume;
     }
+    void setPortMute(bool muted) override {
+        mMuteState.muteFromPortVolume = muted;
+    }
     float getPortVolume() const override { return mVolume; }
+    bool getPortMute() const override { return mMuteState.muteFromPortVolume; }
 
 private:
     DISALLOW_COPY_AND_ASSIGN(MmapTrack);
@@ -86,6 +91,7 @@
     void onTimestamp(const ExtendedTimestamp &timestamp) final;
 
     const pid_t mPid;
+    const uid_t mUid;
     bool  mSilenced;            // protected by MMapThread::mLock
     bool  mSilencedNotified;    // protected by MMapThread::mLock
 
@@ -99,4 +105,4 @@
     float mVolume = 0.0f;
 };  // end of Track
 
-} // namespace android
\ No newline at end of file
+} // namespace android
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index d0b96de..be59299 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -650,7 +650,8 @@
                                            {} /*timeout*/,
                                            frameCountToBeReady,
                                            1.0f /*speed*/,
-                                           1.0f /*volume*/);
+                                           1.0f /*volume*/,
+                                           false /*muted*/);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 84758a4..70bab6a 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -97,7 +97,8 @@
                                 float speed = 1.0f,
                                 bool isSpatialized = false,
                                 bool isBitPerfect = false,
-                                float volume = 0.0f);
+                                float volume = 0.0f,
+                                bool muted = false);
     ~Track() override;
     status_t initCheck() const final;
     void appendDumpHeader(String8& result) const final;
@@ -226,7 +227,9 @@
 
     // VolumePortInterface implementation
     void setPortVolume(float volume) override;
+    void setPortMute(bool muted) override;
     float getPortVolume() const override { return mVolume; }
+    bool getPortMute() const override { return mMuteState.load().muteFromPortVolume; }
 
 protected:
 
@@ -410,9 +413,9 @@
     // TODO: replace PersistableBundle with own struct
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
-    mute_state_t        mMuteState;
-    bool                mInternalMute = false;
-    std::atomic<float> mVolume = 0.0f;
+    std::atomic<mute_state_t> mMuteState;
+    bool                      mInternalMute = false;
+    std::atomic<float>        mVolume = 0.0f;
 };  // end of Track
 
 
@@ -510,7 +513,8 @@
                                                                     *  the lowest possible latency
                                                                     *  even if it might glitch. */
                                    float speed = 1.0f,
-                                   float volume = 1.0f);
+                                   float volume = 1.0f,
+                                   bool muted = false);
     ~PatchTrack() override;
 
     size_t framesReady() const final;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 69202ae..1c0b749 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1058,7 +1058,9 @@
     }
 
     dprintf(fd, "  Local log:\n");
-    mLocalLog.dump(fd, "   " /* prefix */, 40 /* lines */);
+    const auto logHeader = this->getLocalLogHeader();
+    write(fd, logHeader.data(), logHeader.length());
+    mLocalLog.dump(fd, "   " /* prefix */);
 
     // --all does the statistics
     bool dumpAll = false;
@@ -2403,7 +2405,8 @@
         bool isSpatialized,
         bool isBitPerfect,
         audio_output_flags_t *afTrackFlags,
-        float volume)
+        float volume,
+        bool muted)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2732,7 +2735,7 @@
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
                           IAfTrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
-                          speed, isSpatialized, isBitPerfect, volume);
+                          speed, isSpatialized, isBitPerfect, volume, muted);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2840,10 +2843,14 @@
     }
 }
 
-void PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
+void PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value, bool muted)
 {
+    ALOGV("%s: stream %d value %f muted %d", __func__, stream, value, muted);
     audio_utils::lock_guard _l(mutex());
     mStreamTypes[stream].volume = value;
+    if (com_android_media_audio_ring_my_car()) {
+        mStreamTypes[stream].mute = muted;
+    }
     broadcast_l();
 }
 
@@ -2861,13 +2868,14 @@
 }
 
 status_t PlaybackThread::setPortsVolume(
-        const std::vector<audio_port_handle_t>& portIds, float volume) {
+        const std::vector<audio_port_handle_t>& portIds, float volume, bool muted) {
     audio_utils::lock_guard _l(mutex());
     for (const auto& portId : portIds) {
         for (size_t i = 0; i < mTracks.size(); i++) {
             sp<IAfTrack> track = mTracks[i].get();
             if (portId == track->portId()) {
                 track->setPortVolume(volume);
+                track->setPortMute(muted);
                 break;
             }
         }
@@ -5120,6 +5128,12 @@
     }
 }
 
+std::string PlaybackThread::getLocalLogHeader() const {
+    using namespace std::literals;
+    static constexpr auto indent = "                             "
+                                   "                            "sv;
+    return std::string{indent}.append(IAfTrack::getLogHeader());
+}
 // ----------------------------------------------------------------------------
 
 /* static */
@@ -5851,7 +5865,7 @@
                         volume = masterVolume * mStreamTypes[track->streamType()].volume;
                     }
                 } else {
-                    if (track->isPlaybackRestricted()) {
+                    if (track->isPlaybackRestricted() || track->getPortMute()) {
                         volume = 0.f;
                     } else {
                         volume = masterVolume * track->getPortVolume();
@@ -5875,7 +5889,8 @@
                                            mStreamTypes[track->streamType()].mute,
                                            track->isPlaybackRestricted(),
                                            vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f});
+                                           vh == 0.f,
+                                           /*muteFromPortVolume=*/false});
                 } else {
                     track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                             /*muteState=*/{masterVolume == 0.f,
@@ -5883,7 +5898,8 @@
                                            /* muteFromStreamMuted= */ false,
                                            track->isPlaybackRestricted(),
                                            vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f});
+                                           vh == 0.f,
+                                           track->getPortMute()});
                 }
                 vlf *= volume;
                 vrf *= volume;
@@ -6047,7 +6063,7 @@
                 }
             } else {
                 v = masterVolume * track->getPortVolume();
-                if (track->isPlaybackRestricted()) {
+                if (track->isPlaybackRestricted() || track->getPortMute()) {
                     v = 0;
                 }
             }
@@ -6077,7 +6093,8 @@
                                            mStreamTypes[track->streamType()].mute,
                                            track->isPlaybackRestricted(),
                                            vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f});
+                                           vh == 0.f,
+                                           /*muteFromPortVolume=*/false});
                 } else {
                     track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                             /*muteState=*/{masterVolume == 0.f,
@@ -6085,7 +6102,8 @@
                                            /* muteFromStreamMuted= */ false,
                                            track->isPlaybackRestricted(),
                                            vlf == 0.f && vrf == 0.f,
-                                           vh == 0.f});
+                                           vh == 0.f,
+                                           track->getPortMute()});
                 }
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
@@ -6843,7 +6861,8 @@
                                mStreamTypes[track->streamType()].mute,
                                track->isPlaybackRestricted(),
                                clientVolumeMute,
-                               shaperVolume == 0.f});
+                               shaperVolume == 0.f,
+                               /*muteFromPortVolume=*/false});
     } else {
         if (mMasterMute || track->isPlaybackRestricted()) {
             left = right = 0;
@@ -6871,7 +6890,8 @@
                                /* muteFromStreamMuted= */ false,
                                track->isPlaybackRestricted(),
                                clientVolumeMute,
-                               shaperVolume == 0.f});
+                               shaperVolume == 0.f,
+                               track->getPortMute()});
     }
 
     if (lastTrack) {
@@ -7973,8 +7993,9 @@
         return;
     }
     if (!audioserver_flags::portid_volume_management()) {
-        thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+        thread->setStreamVolume(AUDIO_STREAM_PATCH, /*volume=*/1.0f, /*muted=*/false);
     }
+
     mOutputTracks.add(outputTrack);
     ALOGV("addOutputTrack() track %p, on thread %p", outputTrack.get(), thread);
     updateWaitTime_l();
@@ -10252,6 +10273,13 @@
     }
 }
 
+std::string RecordThread::getLocalLogHeader() const {
+    using namespace std::literals;
+    static constexpr auto indent = "                             "
+                                   "                            "sv;
+    return std::string{indent}.append(IAfRecordTrack::getLogHeader());
+}
+
 // ----------------------------------------------------------------------------
 //      Mmap
 // ----------------------------------------------------------------------------
@@ -10471,6 +10499,7 @@
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
     float volume = 0.0f;
+    bool muted = false;
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
@@ -10495,7 +10524,8 @@
                                             &secondaryOutputs,
                                             &isSpatialized,
                                             &isBitPerfect,
-                                            &volume);
+                                            &volume,
+                                            &muted);
         mutex().lock();
         mAttr = localAttr;
         ALOGD_IF(!secondaryOutputs.empty(),
@@ -10565,7 +10595,7 @@
                                         mChannelMask, mSessionId, isOutput(),
                                         client.attributionSource,
                                         IPCThreadState::self()->getCallingPid(), portId,
-                                        volume);
+                                        volume, muted);
     if (!isOutput()) {
         track->setSilenced_l(isClientSilenced_l(portId));
     }
@@ -11132,6 +11162,13 @@
     write(fd, result.c_str(), result.size());
 }
 
+std::string MmapThread::getLocalLogHeader() const {
+    using namespace std::literals;
+    static constexpr auto indent = "                             "
+                                   "                            "sv;
+    return std::string{indent}.append(IAfMmapTrack::getLogHeader());
+}
+
 /* static */
 sp<IAfMmapPlaybackThread> IAfMmapPlaybackThread::create(
         const sp<IAfThreadCallback>& afThreadCallback, audio_io_handle_t id,
@@ -11216,10 +11253,14 @@
     }
 }
 
-void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
+void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value, bool muted)
 {
+    ALOGV("%s: stream %d value %f muted %d", __func__, stream, value, muted);
     audio_utils::lock_guard _l(mutex());
     mStreamTypes[stream].volume = value;
+    if (com_android_media_audio_ring_my_car()) {
+        mStreamTypes[stream].mute = muted;
+    }
     if (stream == mStreamType) {
         broadcast_l();
     }
@@ -11241,12 +11282,13 @@
 }
 
 status_t MmapPlaybackThread::setPortsVolume(
-        const std::vector<audio_port_handle_t>& portIds, float volume) {
+        const std::vector<audio_port_handle_t>& portIds, float volume, bool muted) {
     audio_utils::lock_guard _l(mutex());
     for (const auto& portId : portIds) {
         for (const sp<IAfMmapTrack>& track : mActiveTracks) {
             if (portId == track->portId()) {
                 track->setPortVolume(volume);
+                track->setPortMute(muted);
                 break;
             }
         }
@@ -11304,7 +11346,11 @@
             // will be broadcasted to all tracks. Thus, take arbitrarily first track volume.
             size_t numtracks = mActiveTracks.size();
             if (numtracks) {
-                volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+                if (mActiveTracks[0]->getPortMute()) {
+                    volume = 0;
+                } else {
+                    volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+                }
             }
         }
     }
@@ -11348,7 +11394,8 @@
                         // TODO(b/241533526): adjust logic to include mute from AppOps
                         false /*muteFromPlaybackRestricted*/,
                         false /*muteFromClientVolume*/,
-                        false /*muteFromVolumeShaper*/});
+                        false /*muteFromVolumeShaper*/,
+                        false /*muteFromPortVolume*/});
             } else {
                 track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                     /*muteState=*/{mMasterMute,
@@ -11357,7 +11404,8 @@
                                    // TODO(b/241533526): adjust logic to include mute from AppOps
                                    false /*muteFromPlaybackRestricted*/,
                                    false /*muteFromClientVolume*/,
-                                   false /*muteFromVolumeShaper*/});
+                                   false /*muteFromVolumeShaper*/,
+                                   track->getPortMute()});
                 }
         }
     }
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index bf37238..0c5a2c3 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -576,6 +576,9 @@
         return mThreadloopExecutor;
     }
 
+    // Used to print the header for the local log on a particular thread type
+    virtual std::string getLocalLogHeader() const { return {}; }
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -888,7 +891,7 @@
                     bool                mHasChanged = false;
                 };
 
-                SimpleLog mLocalLog;  // locked internally
+                SimpleLog mLocalLog {/* maxLogLines= */ 120};  // locked internally
 
     // mThreadloopExecutor contains deferred functors and object (dtors) to
     // be executed at the end of the processing period, without any
@@ -1017,11 +1020,12 @@
     void setMasterVolume(float value) final;
     void setMasterBalance(float balance) override EXCLUDES_ThreadBase_Mutex;
     void setMasterMute(bool muted) final;
-    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+    void setStreamVolume(audio_stream_type_t stream, float value, bool muted) final
+            EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
-    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
-            final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+                            bool muted) final EXCLUDES_ThreadBase_Mutex;
 
     void setVolumeForOutput_l(float left, float right) const final;
 
@@ -1048,7 +1052,8 @@
                                 bool isSpatialized,
                                 bool isBitPerfect,
                                 audio_output_flags_t* afTrackFlags,
-                                float volume) final
+                                float volume,
+                                bool muted) final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     bool isTrackActive(const sp<IAfTrack>& track) const final {
@@ -1230,6 +1235,9 @@
             override EXCLUDES_ThreadBase_Mutex {
         // Do nothing. It is only used for bit perfect thread
     }
+
+    std::string getLocalLogHeader() const override;
+
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -2134,6 +2142,8 @@
                             return !(mInput == nullptr || mInput->stream == nullptr);
                         }
 
+    std::string getLocalLogHeader() const override;
+
 protected:
     void dumpInternals_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
     void dumpTracks_l(int fd, const Vector<String16>& args) override REQUIRES(mutex());
@@ -2325,6 +2335,8 @@
 
     bool isStreamInitialized() const override { return false; }
 
+    std::string getLocalLogHeader() const override;
+
     void setClientSilencedState_l(audio_port_handle_t portId, bool silenced) REQUIRES(mutex()) {
                                 mClientSilencedStates[portId] = silenced;
                             }
@@ -2395,11 +2407,13 @@
     // Needs implementation?
     void setMasterBalance(float /* value */) final EXCLUDES_ThreadBase_Mutex {}
     void setMasterMute(bool muted) final EXCLUDES_ThreadBase_Mutex;
-    void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
+
+    void setStreamVolume(audio_stream_type_t stream, float value, bool muted) final
+            EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
-    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
-            final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+                            bool muted) final EXCLUDES_ThreadBase_Mutex;
 
     void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
 
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a692773..0ddbaec 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -728,7 +728,8 @@
         float speed,
         bool isSpatialized,
         bool isBitPerfect,
-        float volume) {
+        float volume,
+        bool muted) {
     return sp<Track>::make(thread,
             client,
             streamType,
@@ -750,7 +751,8 @@
             speed,
             isSpatialized,
             isBitPerfect,
-            volume);
+            volume,
+            muted);
 }
 
 // Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
@@ -776,7 +778,8 @@
             float speed,
             bool isSpatialized,
             bool isBitPerfect,
-            float volume)
+            float volume,
+            bool muted)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -861,11 +864,16 @@
 
     populateUsageAndContentTypeFromStreamType();
 
+    mute_state_t newMuteState = mMuteState.load();
+    newMuteState.muteFromPortVolume = muted;
+
     // Audio patch and call assistant volume are always max
     if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
             || mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
         mVolume = 1.0f;
+        newMuteState.muteFromPortVolume = false;
     }
+    mMuteState.store(newMuteState);
 
     mServerLatencySupported = checkServerLatencySupported(format, flags);
 #ifdef TEE_SINK
@@ -1000,13 +1008,8 @@
 
 void Track::appendDumpHeader(String8& result) const
 {
-    result.appendFormat("Type     Id Active Client Session Port Id S  Flags "
-                        "  Format Chn mask  SRate "
-                        "ST Usg CT "
-                        " G db  L dB  R dB  VS dB  PortVol dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
-                        "%s\n",
-                        isServerLatencySupported() ? "   Latency" : "");
+    const auto res = IAfTrack::getLogHeader();
+    result.append(res.data(), res.size());
 }
 
 void Track::appendDump(String8& result, bool active) const
@@ -1086,13 +1089,14 @@
             ? 'r' /* buffer reduced */: bufferSizeInFrames > mFrameCount
                     ? 'e' /* error */ : ' ' /* identical */;
 
-    result.appendFormat("%7s %6u %7u %7u %2s 0x%03X "
+    result.appendFormat("%7s %7u/%7u %7u %7u %2s 0x%03X "
                         "%08X %08X %6u "
                         "%2u %3x %2x "
-                        "%5.2g %5.2g %5.2g %5.2g%c %11.2g "
+                        "%5.2g %5.2g %5.2g %5.2g%c %11.2g %12s"
                         "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
-            (mClient == 0) ? getpid() : mClient->pid(),
+            mClient ? mClient->pid() : getpid(),
+            mClient ? mClient->uid() : getuid(),
             mSessionId,
             mPortId,
             getTrackStateAsCodedString(),
@@ -1112,6 +1116,7 @@
             20.0 * log10(vsVolume.first), // VolumeShaper(s) total volume
             vsVolume.second ? 'A' : ' ',  // if any VolumeShapers active
             20.0 * log10(mVolume),
+            getPortMute() ? "true" : "false",
 
             mCblk->mServer,
             bufferSizeInFrames,
@@ -1618,8 +1623,25 @@
     if (mType != TYPE_PATCH) {
         // Do not recursively propagate a PatchTrack setPortVolume to
         // downstream PatchTracks.
-        forEachTeePatchTrack_l([volume](const auto& patchTrack) {
-                patchTrack->setPortVolume(volume); });
+        forEachTeePatchTrack_l([volume](const auto &patchTrack) {
+            patchTrack->setPortVolume(volume);
+        });
+    }
+}
+
+void Track::setPortMute(bool muted) {
+    mute_state_t newMuteState = mMuteState.load();
+    if (newMuteState.muteFromPortVolume == muted) {
+        return;
+    }
+    newMuteState.muteFromPortVolume = muted;
+    mMuteState.store(newMuteState);
+    if (mType != TYPE_PATCH) {
+        // Do not recursively propagate a PatchTrack setPortVolume to
+        // downstream PatchTracks.
+        forEachTeePatchTrack_l([muted](const auto &patchTrack) {
+            patchTrack->setPortMute(muted);
+        });
     }
 }
 
@@ -1724,8 +1746,8 @@
     }
 
     if (result == OK) {
-        ALOGI("%s(%d): processed mute state for port ID %d from %d to %d", __func__, id(), mPortId,
-                static_cast<int>(mMuteState), static_cast<int>(muteState));
+        ALOGI("%s(%d): processed mute state for port ID %d from %#x to %#x", __func__, id(),
+              mPortId, static_cast<int>(mMuteState.load()), static_cast<int>(muteState));
         mMuteState = muteState;
     } else {
         ALOGW("%s(%d): cannot process mute state for port ID %d, status error %d", __func__, id(),
@@ -2502,7 +2524,8 @@
                                          *  the lowest possible latency
                                          *  even if it might glitch. */
         float speed,
-        float volume)
+        float volume,
+        bool muted)
 {
     return sp<PatchTrack>::make(
             playbackThread,
@@ -2517,7 +2540,8 @@
             timeout,
             frameCountToBeReady,
             speed,
-            volume);
+            volume,
+            muted);
 }
 
 PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2532,14 +2556,15 @@
                                                      const Timeout& timeout,
                                                      size_t frameCountToBeReady,
                                                      float speed,
-                                                     float volume)
+                                                     float volume,
+                                                     bool muted)
     :   Track(playbackThread, NULL, streamType,
               AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
               TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed,
-              false /*isSpatialized*/, false /*isBitPerfect*/, volume),
+              false /*isSpatialized*/, false /*isBitPerfect*/, volume, muted),
         PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
                         true /*clientInServer*/) : nullptr,
                        playbackThread, timeout)
@@ -2989,21 +3014,20 @@
 
 void RecordTrack::appendDumpHeader(String8& result) const
 {
-    result.appendFormat("Active     Id Client Session Port Id  S  Flags  "
-                        " Format Chn mask  SRate Source  "
-                        " Server FrmCnt FrmRdy Sil%s\n",
-                        isServerLatencySupported() ? "   Latency" : "");
+    const auto res = IAfRecordTrack::getLogHeader();
+    result.append(res.data(), res.size());
 }
 
 void RecordTrack::appendDump(String8& result, bool active) const
 {
-    result.appendFormat("%c%5s %6d %6u %7u %7u  %2s 0x%03X "
+    result.appendFormat("%c%5s %6d %7u/%7u %7u %7u  %2s 0x%03X "
             "%08X %08X %6u %6X "
             "%08X %6zu %6zu %3c",
             isFastTrack() ? 'F' : ' ',
             active ? "yes" : "no",
             mId,
-            (mClient == 0) ? getpid() : mClient->pid(),
+            mClient ? mClient->pid() : getpid(),
+            mClient ? mClient->uid() : getuid(),
             mSessionId,
             mPortId,
             getTrackStateAsCodedString(),
@@ -3524,7 +3548,8 @@
           const android::content::AttributionSourceState& attributionSource,
           pid_t creatorPid,
           audio_port_handle_t portId,
-          float volume)
+          float volume,
+          bool muted)
 {
     return sp<MmapTrack>::make(
             thread,
@@ -3537,7 +3562,8 @@
             attributionSource,
             creatorPid,
             portId,
-            volume);
+            volume,
+            muted);
 }
 
 MmapTrack::MmapTrack(IAfThreadBase* thread,
@@ -3550,7 +3576,8 @@
         const AttributionSourceState& attributionSource,
         pid_t creatorPid,
         audio_port_handle_t portId,
-        float volume)
+        float volume,
+        bool muted)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
@@ -3561,14 +3588,17 @@
                   TYPE_DEFAULT, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
         mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
+        mUid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid))),
             mSilenced(false), mSilencedNotified(false), mVolume(volume)
 {
+    mMuteState.muteFromPortVolume = muted;
     // Once this item is logged by the server, the client can add properties.
     mTrackMetrics.logConstructor(creatorPid, uid(), id());
     if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
             || attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
         // Audio patch and call assistant volume are always max
         mVolume = 1.0f;
+        mMuteState.muteFromPortVolume = false;
     }
 }
 
@@ -3648,14 +3678,15 @@
 
 void MmapTrack::appendDumpHeader(String8& result) const
 {
-    result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s  %s\n",
-                        isOut() ? "Usg CT": "Source", isOut() ? "PortVol dB" : "");
+    const auto res = IAfMmapTrack::getLogHeader();
+    result.append(res.data(), res.size());
 }
 
 void MmapTrack::appendDump(String8& result, bool active __unused) const
 {
-    result.appendFormat("%6u %7u %7u %08X %08X %6u 0x%03X ",
+    result.appendFormat("%7u/%7u %7u %7u %08X %08X %6u 0x%03X ",
             mPid,
+            mUid,
             mSessionId,
             mPortId,
             mFormat,
@@ -3663,10 +3694,11 @@
             mSampleRate,
             mAttr.flags);
     if (isOut()) {
-        result.appendFormat("%3x %2x", mAttr.usage, mAttr.content_type);
+        result.appendFormat("%4x %2x", mAttr.usage, mAttr.content_type);
         result.appendFormat("%11.2g", 20.0 * log10(mVolume));
+        result.appendFormat("%12s", mMuteState.muteFromPortVolume ? "true" : "false");
     } else {
-        result.appendFormat("%6x", mAttr.source);
+        result.appendFormat("%7x", mAttr.source);
     }
     result.append("\n");
 }
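
Aside on the load/modify/store idiom used by Track::setPortMute() above: mMuteState holds a small
struct inside an atomic, so a writer copies the value out, flips one field, and stores the whole
struct back. A minimal standalone sketch follows; MuteState, gMuteState and the extra field name
are hypothetical stand-ins for the framework's mute_state_t, and the single-writer assumption is
ours, not something the change states.

#include <atomic>
#include <cstdio>

// Hypothetical stand-in for the framework's mute_state_t: a small trivially
// copyable struct held in a std::atomic so readers never observe a torn value.
struct MuteState {
    bool muteFromStreamVolume = false;  // illustrative field
    bool muteFromPortVolume = false;
};

std::atomic<MuteState> gMuteState{MuteState{}};

// Same load/modify/store idiom as Track::setPortMute(). Note this is not a
// compare-and-swap: concurrent writers could overwrite each other, which is
// only safe if a single thread owns the writes (our assumption here).
void setPortMute(bool muted) {
    MuteState newState = gMuteState.load();
    if (newState.muteFromPortVolume == muted) {
        return;  // no change, avoid a redundant store
    }
    newState.muteFromPortVolume = muted;
    gMuteState.store(newState);
}

int main() {
    setPortMute(true);
    std::printf("muteFromPortVolume=%d\n", gMuteState.load().muteFromPortVolume);
    return 0;
}
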
diff --git a/services/audioflinger/datapath/VolumeInterface.h b/services/audioflinger/datapath/VolumeInterface.h
index 1564fe1..02b6ade 100644
--- a/services/audioflinger/datapath/VolumeInterface.h
+++ b/services/audioflinger/datapath/VolumeInterface.h
@@ -25,7 +25,7 @@
     virtual void setMasterVolume(float value) = 0;
     virtual void setMasterBalance(float balance) = 0;
     virtual void setMasterMute(bool muted) = 0;
-    virtual void setStreamVolume(audio_stream_type_t stream, float value) = 0;
+    virtual void setStreamVolume(audio_stream_type_t stream, float value, bool muted) = 0;
     virtual void setStreamMute(audio_stream_type_t stream, bool muted) = 0;
     // TODO(b/290699744) add "get" prefix for getter below.
     virtual float streamVolume(audio_stream_type_t stream) const = 0;
diff --git a/services/audioflinger/datapath/VolumePortInterface.h b/services/audioflinger/datapath/VolumePortInterface.h
index fb1c463..fe3b782 100644
--- a/services/audioflinger/datapath/VolumePortInterface.h
+++ b/services/audioflinger/datapath/VolumePortInterface.h
@@ -23,7 +23,10 @@
 class VolumePortInterface : public virtual RefBase {
 public:
     virtual void setPortVolume(float volume) = 0;
+    virtual void setPortMute(bool mute) = 0;
     virtual float getPortVolume() const = 0;
+    /** Returns the muted state defined by the volume group which is playing on this port. */
+    virtual bool getPortMute() const = 0;
 };
 
 }  // namespace android
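
The hunk above extends VolumePortInterface with a group-mute flag alongside the existing port
volume. A minimal standalone sketch of a consumer follows; DemoTrack, effectiveGain() and the
"mute wins over volume" combination are illustrative assumptions, not the framework's actual
Track implementation.

#include <atomic>
#include <cstdio>

// Hypothetical mirror of the extended interface; the real VolumePortInterface
// derives from RefBase and lives in services/audioflinger/datapath.
class VolumePort {
public:
    virtual ~VolumePort() = default;
    virtual void setPortVolume(float volume) = 0;
    virtual void setPortMute(bool mute) = 0;
    virtual float getPortVolume() const = 0;
    // Muted state decided by the volume group playing on this port.
    virtual bool getPortMute() const = 0;
};

class DemoTrack : public VolumePort {
public:
    void setPortVolume(float volume) override { mVolume.store(volume); }
    void setPortMute(bool mute) override { mMutedByGroup.store(mute); }
    float getPortVolume() const override { return mVolume.load(); }
    bool getPortMute() const override { return mMutedByGroup.load(); }

    // Illustrative combination: the group mute overrides the port volume.
    float effectiveGain() const { return getPortMute() ? 0.0f : getPortVolume(); }

private:
    std::atomic<float> mVolume{1.0f};
    std::atomic<bool> mMutedByGroup{false};
};

int main() {
    DemoTrack track;
    track.setPortVolume(0.5f);
    track.setPortMute(true);
    std::printf("gain=%.2f\n", track.effectiveGain());  // 0.00 while muted
    return 0;
}
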
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index edcb805..3539f00 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -148,7 +148,8 @@
                                       output_type_t *outputType,
                                       bool *isSpatialized,
                                       bool *isBitPerfect,
-                                      float *volume) = 0;
+                                      float *volume,
+                                      bool *muted) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding
     // stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -197,6 +198,7 @@
     // setting volume for all devices
     virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                           int index,
+                                          bool muted,
                                           audio_devices_t device) = 0;
 
     // retrieve current volume index for the specified stream and the
@@ -208,6 +210,7 @@
 
     virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                  int index,
+                                                 bool muted,
                                                  audio_devices_t device) = 0;
     virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                  int &index,
@@ -514,7 +517,7 @@
     // set a stream volume for a particular output. For the same user setting, a given stream type
     // can have different volumes
     // for each output (destination device) it is attached to.
-    virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
+    virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, bool muted,
                                      audio_io_handle_t output, int delayMs = 0) = 0;
     /**
      * Set volume for given AudioTrack port ids for a particular output.
@@ -522,12 +525,13 @@
      * can have different volumes for each output (destination device) it is attached to.
      * @param ports to consider
      * @param volume to apply
+     * @param muted to apply
      * @param output to consider
      * @param delayMs to use
      * @return NO_ERROR if successful
      */
     virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
-            audio_io_handle_t output, int delayMs = 0) = 0;
+            bool muted, audio_io_handle_t output, int delayMs = 0) = 0;
 
     // function enabling to send proprietary informations directly from audio policy manager to
     // audio hardware interface.
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 3b7cae3..d499222 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -29,19 +29,21 @@
 /**
  * Legacy audio policy product strategies IDs. These strategies are supported by the default
  * policy engine.
+ * IMPORTANT NOTE: the order of this enum is important as it determines the priority
+ * between active strategies for routing decisions: lower enum value => higher priority
  */
 enum legacy_strategy {
     STRATEGY_NONE = -1,
-    STRATEGY_MEDIA,
     STRATEGY_PHONE,
     STRATEGY_SONIFICATION,
-    STRATEGY_SONIFICATION_RESPECTFUL,
-    STRATEGY_DTMF,
     STRATEGY_ENFORCED_AUDIBLE,
-    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
     STRATEGY_ACCESSIBILITY,
-    STRATEGY_REROUTING,
+    STRATEGY_SONIFICATION_RESPECTFUL,
+    STRATEGY_MEDIA,
+    STRATEGY_DTMF,
     STRATEGY_CALL_ASSISTANT,
+    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
+    STRATEGY_REROUTING,
     STRATEGY_PATCH,
 };
 
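With the comment above, the enum order becomes load-bearing: a lower value means a higher priority
when arbitrating between active strategies. As a rough illustration (pickWinner() is a hypothetical
helper, not an engine API), a routing decision keyed on this ordering reduces to taking the smallest
active enum value.

#include <cstdio>
#include <vector>

// Values mirror the reordered legacy_strategy above: lower value => higher priority.
enum legacy_strategy {
    STRATEGY_NONE = -1,
    STRATEGY_PHONE,
    STRATEGY_SONIFICATION,
    STRATEGY_ENFORCED_AUDIBLE,
    STRATEGY_ACCESSIBILITY,
    STRATEGY_SONIFICATION_RESPECTFUL,
    STRATEGY_MEDIA,
    STRATEGY_DTMF,
    STRATEGY_CALL_ASSISTANT,
    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
    STRATEGY_REROUTING,
    STRATEGY_PATCH,
};

// Picks the highest-priority active strategy, i.e. the smallest enum value present.
legacy_strategy pickWinner(const std::vector<legacy_strategy>& active) {
    legacy_strategy winner = STRATEGY_NONE;
    for (legacy_strategy s : active) {
        if (winner == STRATEGY_NONE || s < winner) {
            winner = s;
        }
    }
    return winner;
}

int main() {
    // MEDIA and PHONE both active: PHONE wins because it now has the lower value.
    std::printf("winner=%d\n", pickWinner({STRATEGY_MEDIA, STRATEGY_PHONE}));
    return 0;
}
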
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 4dedcd6..0e1d090 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -39,6 +39,7 @@
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
+        "com.android.media.audio-aconfig-cc",
         "com.android.media.audioserver-aconfig-cc",
         "libaconfig_storage_read_api_cc",
         "libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 835fad2..9bceee7 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -89,7 +89,9 @@
 class VolumeActivity : public ActivityTracking
 {
 public:
-    bool isMuted() const { return mMuteCount > 0; }
+    bool isMutedInternally() const { return mMuteCount > 0; }
+    bool isMutedByGroup() const { return mMutedByGroup > 0; }
+    void setMutedByGroup(bool mutedByGroup) { mMutedByGroup = mutedByGroup; }
     int getMuteCount() const { return mMuteCount; }
     int incMuteCount() { return ++mMuteCount; }
     int decMuteCount() { return mMuteCount > 0 ? --mMuteCount : -1; }
@@ -107,6 +109,7 @@
 
 private:
     int mMuteCount = 0; /**< mute request counter */
+    bool mMutedByGroup = false; /**< mute from AudioService, does not add to counter */
     float mCurVolumeDb = NAN; /**< current volume in dB. */
     bool mIsVoice = false; /** true if this volume source is used for voice call volume */
 };
@@ -209,16 +212,25 @@
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
                     mVolumeActivities.at(vs).getActivityCount() : 0;
     }
-    bool isMuted(VolumeSource vs) const
+    bool isMutedInternally(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
-                    mVolumeActivities.at(vs).isMuted() : false;
+                    mVolumeActivities.at(vs).isMutedInternally() : false;
     }
     int getMuteCount(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
                     mVolumeActivities.at(vs).getMuteCount() : 0;
     }
+    bool isMutedByGroup(VolumeSource vs) const
+    {
+        return mVolumeActivities.find(vs) != std::end(mVolumeActivities)?
+               mVolumeActivities.at(vs).isMutedByGroup() : false;
+    }
+    bool hasVolumeSource(VolumeSource vs) const
+    {
+        return mVolumeActivities.find(vs) != std::end(mVolumeActivities);
+    }
     int incMuteCount(VolumeSource vs)
     {
         return mVolumeActivities[vs].incMuteCount();
@@ -227,10 +239,11 @@
     {
         return mVolumeActivities[vs].decMuteCount();
     }
-    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
+    void setCurVolume(VolumeSource vs, float volumeDb, bool mutedByGroup, bool isVoiceVolSrc)
     {
         // Even if not activity for this source registered, need to create anyway
         mVolumeActivities[vs].setVolume(volumeDb);
+        mVolumeActivities[vs].setMutedByGroup(mutedByGroup);
         mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
     }
     float getCurVolume(VolumeSource vs) const
diff --git a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
index fd8b81a..ebfc597 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IVolumeCurves.h
@@ -41,6 +41,8 @@
     virtual status_t initVolume(int indexMin, int indexMax) = 0;
     virtual std::vector<audio_attributes_t> getAttributes() const = 0;
     virtual std::vector<audio_stream_type_t> getStreamTypes() const = 0;
+    virtual void setIsMuted(bool isMuted) = 0;
+    virtual bool isMuted() const = 0;
     virtual void dump(String8 *dst, int spaces = 0, bool curvePoints = false) const = 0;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 2c41de4..c417462 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -22,6 +22,7 @@
 #include <AudioPolicyInterface.h>
 #include "AudioOutputDescriptor.h"
 #include "AudioPolicyMix.h"
+#include <com_android_media_audio.h>
 #include "IOProfile.h"
 #include "Volume.h"
 #include "HwModule.h"
@@ -161,7 +162,7 @@
     return false;
 }
 
-bool AudioOutputDescriptor::setVolume(float volumeDb, bool /*muted*/,
+bool AudioOutputDescriptor::setVolume(float volumeDb, bool mutedByGroup,
                                       VolumeSource volumeSource,
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
@@ -169,7 +170,6 @@
                                       bool force,
                                       bool isVoiceVolSrc)
 {
-
     if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
         ALOGV("%s output ID %d unsupported device %s",
                 __func__, getId(), toString(deviceTypes).c_str());
@@ -177,10 +177,14 @@
     }
     // We actually change the volume if:
     // - the float value returned by computeVolume() changed
+    // - the muted state changed
     // - the force flag is set
-    if (volumeDb != getCurVolume(volumeSource) || force) {
-        ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
-        setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
+    const bool mutedChanged =
+            com_android_media_audio_ring_my_car() && (isMutedByGroup(volumeSource) != mutedByGroup);
+    if (volumeDb != getCurVolume(volumeSource) || mutedChanged || force) {
+        ALOGV("%s for volumeSrc %d, volume %f, mutedByGroup %d, delay %d", __func__, volumeSource,
+              volumeDb, mutedByGroup, delayMs);
+        setCurVolume(volumeSource, volumeDb, mutedByGroup, isVoiceVolSrc);
         return true;
     }
     return false;
@@ -497,7 +501,7 @@
 }
 
 void SwAudioOutputDescriptor::setSwMute(
-        bool muted, VolumeSource vs, const StreamTypeVector &streamTypes,
+        bool mutedByGroup, VolumeSource vs, const StreamTypeVector &streamTypes,
         const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
     // volume source active and more than one volume source is active, otherwise, no-op or let
     // setVolume controlling SW and/or HW Gains
@@ -506,11 +510,11 @@
             for (const auto& devicePort : devices()) {
                 if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                     devicePort->hasGainController(true /*canUseForVolume*/)) {
-                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
                     ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
-                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                          mIoHandle, vs, mutedByGroup, getActiveVolumeSources().size());
                     for (const auto &stream : streamTypes) {
-                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                        mClientInterface->setStreamVolume(stream, Volume::DbToAmpl(0), mutedByGroup,
+                                                          mIoHandle, delayMs);
                     }
                     return;
                 }
@@ -521,11 +525,12 @@
             for (const auto &devicePort: devices()) {
                 if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                     devicePort->hasGainController(true /*canUseForVolume*/)) {
-                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                    float volumeAmpl = Volume::DbToAmpl(0);
                     ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
-                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                          mIoHandle, vs, mutedByGroup, getActiveVolumeSources().size());
                     mClientInterface->setPortsVolume(
-                            getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+                            getPortsForVolumeSource(vs), volumeAmpl, mutedByGroup,
+                            mIoHandle, delayMs);
                     return;
                 }
             }
@@ -533,7 +538,7 @@
     }
 }
 
-bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool muted,
+bool SwAudioOutputDescriptor::setVolume(float volumeDb, bool mutedByGroup,
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
@@ -542,18 +547,22 @@
 {
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
-            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
+            volumeDb, mutedByGroup, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
         if (hasStream(streamTypes, AUDIO_STREAM_BLUETOOTH_SCO)) {
             VolumeSource callVolSrc = getVoiceSource();
-            if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
-                setCurVolume(callVolSrc, volumeDb, true);
+            const bool mutedChanged =
+                    com_android_media_audio_ring_my_car() && hasVolumeSource(callVolSrc) &&
+                    (isMutedByGroup(callVolSrc) != mutedByGroup);
+            if (callVolSrc != VOLUME_SOURCE_NONE &&
+                (volumeDb != getCurVolume(callVolSrc) || mutedChanged)) {
+                setCurVolume(callVolSrc, volumeDb, mutedByGroup, true);
                 float volumeAmpl = Volume::DbToAmpl(volumeDb);
                 if (audioserver_flags::portid_volume_management()) {
                     mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc),
-                            volumeAmpl, mIoHandle, delayMs);
+                            volumeAmpl, mutedByGroup, mIoHandle, delayMs);
                 } else {
                     mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL,
-                            volumeAmpl, mIoHandle, delayMs);
+                            volumeAmpl, mutedByGroup, mIoHandle, delayMs);
                 }
             }
         }
@@ -580,18 +589,19 @@
                 // Allows to mute SW Gain on AudioFlinger only for volume group with explicit
                 // stream(s)
                 if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
-                    const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
-                    float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+                    const bool canMute = mutedByGroup && !streamTypes.empty();
+                    const float volumeAmpl = Volume::DbToAmpl(0);
                     for (const auto &stream: streams) {
-                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                        mClientInterface->setStreamVolume(stream, volumeAmpl, canMute, mIoHandle,
+                                                          delayMs);
                     }
                 }
             } else {
-                float volumeAmpl = (muted && volumeDb != 0.0f) ? 0.0f : Volume::DbToAmpl(0);
+                float volumeAmpl = Volume::DbToAmpl(0);
                 ALOGV("%s: output: %d, vs: %d, active vs count: %zu", __func__,
                       mIoHandle, vs, getActiveVolumeSources().size());
                 mClientInterface->setPortsVolume(
-                        getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+                        getPortsForVolumeSource(vs), volumeAmpl, mutedByGroup, mIoHandle, delayMs);
             }
             AudioGains gains = devicePort->getGains();
             int gainMinValueInMb = gains[0]->getMinValueInMb();
@@ -615,26 +625,27 @@
         if (audioserver_flags::portid_volume_management()) {
             if (callVolSrc != VOLUME_SOURCE_NONE) {
                 mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc), volumeAmpl,
-                        mIoHandle, delayMs);
+                                                 mutedByGroup, mIoHandle, delayMs);
             }
         } else {
-            mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle,
-                    delayMs);
+            mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mutedByGroup,
+                                              mIoHandle, delayMs);
         }
         if (callVolSrc != VOLUME_SOURCE_NONE) {
-            setCurVolume(callVolSrc, getCurVolume(vs), true);
+            setCurVolume(callVolSrc, getCurVolume(vs), mutedByGroup, true);
         }
     }
     if (audioserver_flags::portid_volume_management()) {
-        ALOGV("%s output %d for volumeSource %d, volume %f, delay %d active=%d", __func__,
-              mIoHandle, vs, volumeDb, delayMs, isActive(vs));
-        mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mIoHandle,
-                                         delayMs);
+        ALOGV("%s output %d for volumeSource %d, volume %f, mutedByGroup %d, delay %d active=%d",
+              __func__, mIoHandle, vs, volumeDb, mutedByGroup, delayMs, isActive(vs));
+        mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mutedByGroup,
+                                         mIoHandle, delayMs);
     } else {
         for (const auto &stream : streams) {
-            ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
-                  mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
-            mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+            ALOGV("%s output %d for volumeSource %d, volume %f, mutedByGroup %d delay %d stream=%s",
+                  __func__, mIoHandle, vs, volumeDb, mutedByGroup, delayMs,
+                  toString(stream).c_str());
+            mClientInterface->setStreamVolume(stream, volumeAmpl, mutedByGroup, mIoHandle, delayMs);
         }
     }
     return true;
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 8162720..9b1125d 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -92,6 +92,10 @@
 
     bool isDefault() const;
 
+    bool isPatchStrategy() const {
+        return getVolumeGroupForStreamType(AUDIO_STREAM_PATCH) != VOLUME_GROUP_NONE;
+    }
+
     void dump(String8 *dst, int spaces = 0) const;
 
 private:
diff --git a/services/audiopolicy/engine/common/include/VolumeCurve.h b/services/audiopolicy/engine/common/include/VolumeCurve.h
index 2e75ff1..e5f7a41 100644
--- a/services/audiopolicy/engine/common/include/VolumeCurve.h
+++ b/services/audiopolicy/engine/common/include/VolumeCurve.h
@@ -179,6 +179,12 @@
     }
     StreamTypeVector getStreamTypes() const override { return mStreams; }
 
+    void setIsMuted(bool isMuted) override
+    {
+        mIsMuted = isMuted;
+    }
+    bool isMuted() const override { return mIsMuted; }
+
     void dump(String8 *dst, int spaces = 0, bool curvePoints = false) const override;
 
 private:
@@ -187,6 +193,7 @@
     int mIndexMin; /**< min volume index. */
     int mIndexMax; /**< max volume index. */
     const bool mCanBeMuted = true; /**< true is the stream can be muted. */
+    bool mIsMuted = false; /**< true if the stream is currently muted. */
 
     AttributesVector mAttributes;
     StreamTypeVector mStreams; /**< Keep it for legacy. */
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 976791f..fb8379e 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -311,6 +311,9 @@
     }
     StrategyVector orderedStrategies;
     for (const auto &iter : strategies) {
+        if (iter.second->isPatchStrategy()) {
+            continue;
+        }
         orderedStrategies.push_back(iter.second->getId());
     }
     return orderedStrategies;
@@ -742,6 +745,9 @@
     auto defaultDevices = DeviceVector(getApmObserver()->getDefaultOutputDevice());
     for (const auto &iter : getProductStrategies()) {
         const auto &strategy = iter.second;
+        if (strategy->isPatchStrategy()) {
+            continue;
+        }
         mDevicesForStrategies[strategy->getId()] = defaultDevices;
         setStrategyDevices(strategy, defaultDevices);
     }
@@ -750,6 +756,9 @@
 void EngineBase::updateDeviceSelectionCache() {
     for (const auto &iter : getProductStrategies()) {
         const auto& strategy = iter.second;
+        if (strategy->isPatchStrategy()) {
+            continue;
+        }
         auto devices = getDevicesForProductStrategy(strategy->getId());
         mDevicesForStrategies[strategy->getId()] = devices;
         setStrategyDevices(strategy, devices);
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index c9a77a4..27a290f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -45,4 +45,7 @@
         "libparameter",
         "libutils",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index fd40c04..a5f37b0 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -266,6 +266,7 @@
     bool isSpatialized;
     bool isBitPerfect;
     float volume;
+    bool muted;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
@@ -273,7 +274,7 @@
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
             &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-            &isBitPerfect, &volume) != OK) {
+            &isBitPerfect, &volume, &muted) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index c1c4a28..13db5b3 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1512,7 +1512,8 @@
                                               output_type_t *outputType,
                                               bool *isSpatialized,
                                               bool *isBitPerfect,
-                                              float *volume)
+                                              float *volume,
+                                              bool *muted)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1569,6 +1570,7 @@
     outputDesc->addClient(clientDesc);
 
     *volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
+    *muted = outputDesc->isMutedByGroup(toVolumeSource(resultAttr));
 
     ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
           *output, requestedPortId, *selectedDeviceId, *portId);
@@ -2617,8 +2619,7 @@
         auto &curves = getVolumeCurves(client->attributes());
         if (NO_ERROR != checkAndSetVolume(curves, client->volumeSource(),
                           curves.getVolumeIndex(outputDesc->devices().types()),
-                          outputDesc,
-                          outputDesc->devices().types(), 0 /*delay*/,
+                          outputDesc, outputDesc->devices().types(), 0 /*delay*/,
                           outputDesc->useHwGain() /*force*/)) {
             // request AudioService to reinitialize the volume curves asynchronously
             ALOGE("checkAndSetVolume failed, requesting volume range init");
@@ -3591,6 +3592,7 @@
 
 status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream,
                                                   int index,
+                                                  bool muted,
                                                   audio_devices_t device)
 {
     auto attributes = mEngine->getAttributesForStreamType(stream);
@@ -3600,7 +3602,7 @@
     }
     ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
           toString(stream).c_str(), toString(attributes).c_str(), index, device);
-    return setVolumeIndexForAttributes(attributes, index, device);
+    return setVolumeIndexForAttributes(attributes, index, muted, device);
 }
 
 status_t AudioPolicyManager::getStreamVolumeIndex(audio_stream_type_t stream,
@@ -3619,6 +3621,7 @@
 
 status_t AudioPolicyManager::setVolumeIndexForAttributes(const audio_attributes_t &attributes,
                                                          int index,
+                                                         bool muted,
                                                          audio_devices_t device)
 {
     // Get Volume group matching the Audio Attributes
@@ -3629,6 +3632,11 @@
     }
     ALOGV("%s: group %d matching with %s index %d",
             __FUNCTION__, group, toString(attributes).c_str(), index);
+    if (mEngine->getStreamTypeForAttributes(attributes) == AUDIO_STREAM_PATCH) {
+        ALOGV("%s: cannot change volume for PATCH stream, attrs: %s",
+                __FUNCTION__, toString(attributes).c_str());
+        return NO_ERROR;
+    }
     status_t status = NO_ERROR;
     IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
@@ -3638,7 +3646,8 @@
             toVolumeSource(AUDIO_STREAM_VOICE_CALL, false) : vs;
     product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
 
-    status = setVolumeCurveIndex(index, device, curves);
+
+    status = setVolumeCurveIndex(index, muted, device, curves);
     if (status != NO_ERROR) {
         ALOGE("%s failed to set curve index for group %d device 0x%X", __func__, group, device);
         return status;
@@ -3700,8 +3709,9 @@
         // HW Gain management, do not change the volume
         if (desc->useHwGain()) {
             applyVolume = false;
+            bool swMute = com_android_media_audio_ring_my_car() ? curves.isMuted() : (index == 0);
             // If the volume source is active with higher priority source, ensure at least Sw Muted
-            desc->setSwMute((index == 0), vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
+            desc->setSwMute(swMute, vs, curves.getStreamTypes(), curDevices, 0 /*delayMs*/);
             for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
                 auto activeClients = desc->clientsList(true /*activeOnly*/, productStrategy,
                                                        false /*preferredDevice*/);
@@ -3739,8 +3749,7 @@
         //FIXME: workaround for truncated touch sounds
         // delayed volume change for system stream to be removed when the problem is
         // handled by system UI
-        status_t volStatus = checkAndSetVolume(
-                    curves, vs, index, desc, curDevices,
+        status_t volStatus = checkAndSetVolume(curves, vs, index, desc, curDevices,
                     ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM, false))?
                          TOUCH_SOUND_FIXED_DELAY_MS : 0));
         if (volStatus != NO_ERROR) {
@@ -3769,6 +3778,7 @@
 }
 
 status_t AudioPolicyManager::setVolumeCurveIndex(int index,
+                                                 bool muted,
                                                  audio_devices_t device,
                                                  IVolumeCurves &volumeCurves)
 {
@@ -3788,8 +3798,9 @@
     // Force max volume if stream cannot be muted
     if (!volumeCurves.canBeMuted()) index = volumeCurves.getVolumeIndexMax();
 
-    ALOGV("%s device %08x, index %d", __FUNCTION__ , device, index);
+    ALOGV("%s device %08x, index %d, muted %d", __FUNCTION__, device, index, muted);
     volumeCurves.addCurrentVolumeIndex(device, index);
+    volumeCurves.setIsMuted(muted);
     return NO_ERROR;
 }
 
@@ -7898,7 +7909,8 @@
         }
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-            setVolumeSourceMute(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/, DeviceTypeSet());
+            setVolumeSourceMutedInternally(ttsVolumeSource, mute/*on*/, desc, 0 /*delay*/,
+                                           DeviceTypeSet());
             const uint32_t latency = desc->latency() * 2;
             if (desc->isActive(latency * 2) && latency > maxLatency) {
                 maxLatency = latency;
@@ -7992,9 +8004,10 @@
         for (const auto &activeVs : outputDesc->getActiveVolumeSources()) {
             // make sure that we do not start the temporary mute period too early in case of
             // delayed device change
-            setVolumeSourceMute(activeVs, true, outputDesc, delayMs);
-            setVolumeSourceMute(activeVs, false, outputDesc, delayMs + tempMuteDurationMs,
-                                devices.types());
+            setVolumeSourceMutedInternally(activeVs, true, outputDesc, delayMs);
+            setVolumeSourceMutedInternally(activeVs, false, outputDesc,
+                                           delayMs + tempMuteDurationMs,
+                                           devices.types());
         }
     }
 
@@ -8507,7 +8520,7 @@
     static std::set<IVolumeCurves*> invalidCurvesReported;
 
     // do not change actual attributes volume if the attributes is muted
-    if (outputDesc->isMuted(volumeSource)) {
+    if (!com_android_media_audio_ring_my_car() && outputDesc->isMutedInternally(volumeSource)) {
         ALOGVV("%s: volume source %d muted count %d active=%d", __func__, volumeSource,
                outputDesc->getMuteCount(volumeSource), outputDesc->isActive(volumeSource));
         return NO_ERROR;
@@ -8549,7 +8562,13 @@
                     || isSingleDeviceType(deviceTypes, audio_is_ble_out_device)))) {
         volumeDb = 0.0f;
     }
-    const bool muted = (index == 0) && (volumeDb != 0.0f);
+
+    bool muted;
+    if (!com_android_media_audio_ring_my_car()) {
+        muted = (index == 0) && (volumeDb != 0.0f);
+    } else {
+        muted = curves.isMuted();
+    }
     outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
             deviceTypes, delayMs, force, isVoiceVolSrc);
 
@@ -8564,6 +8583,11 @@
 void AudioPolicyManager::setVoiceVolume(
         int index, IVolumeCurves &curves, bool voiceVolumeManagedByHost, int delayMs) {
     float voiceVolume;
+
+    if (com_android_media_audio_ring_my_car() && curves.isMuted()) {
+        index = 0;
+    }
+
     // Force voice volume to max or mute for Bluetooth SCO/BLE as other attenuations are managed
     // by the headset
     if (voiceVolumeManagedByHost) {
@@ -8617,8 +8641,7 @@
     ALOGVV("applyStreamVolumes() for device %s", dumpDeviceTypes(deviceTypes).c_str());
     for (const auto &volumeGroup : mEngine->getVolumeGroups()) {
         auto &curves = getVolumeCurves(toVolumeSource(volumeGroup));
-        checkAndSetVolume(curves, toVolumeSource(volumeGroup),
-                          curves.getVolumeIndex(deviceTypes),
+        checkAndSetVolume(curves, toVolumeSource(volumeGroup), curves.getVolumeIndex(deviceTypes),
                           outputDesc, deviceTypes, delayMs, force);
     }
 }
@@ -8641,23 +8664,23 @@
         }
     }
     for (auto source : sourcesToMute) {
-        setVolumeSourceMute(source, on, outputDesc, delayMs, deviceTypes);
+        setVolumeSourceMutedInternally(source, on, outputDesc, delayMs, deviceTypes);
     }
 
 }
 
-void AudioPolicyManager::setVolumeSourceMute(VolumeSource volumeSource,
-                                             bool on,
-                                             const sp<AudioOutputDescriptor>& outputDesc,
-                                             int delayMs,
-                                             DeviceTypeSet deviceTypes)
+void AudioPolicyManager::setVolumeSourceMutedInternally(VolumeSource volumeSource,
+                                                        bool on,
+                                                        const sp<AudioOutputDescriptor>& outputDesc,
+                                                        int delayMs,
+                                                        DeviceTypeSet deviceTypes)
 {
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
     }
     auto &curves = getVolumeCurves(volumeSource);
     if (on) {
-        if (!outputDesc->isMuted(volumeSource)) {
+        if (!outputDesc->isMutedInternally(volumeSource)) {
             if (curves.canBeMuted() &&
                     (volumeSource != toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE, false) ||
                      (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) ==
@@ -8669,7 +8692,7 @@
         // ignored
         outputDesc->incMuteCount(volumeSource);
     } else {
-        if (!outputDesc->isMuted(volumeSource)) {
+        if (!outputDesc->isMutedInternally(volumeSource)) {
             ALOGV("%s unmuting non muted attributes!", __func__);
             return;
         }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 9ad2ea5..20f7b12 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -129,7 +129,8 @@
                                   output_type_t *outputType,
                                   bool *isSpatialized,
                                   bool *isBitPerfect,
-                                  float *volume) override;
+                                  float *volume,
+                                  bool *muted) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
@@ -169,6 +170,7 @@
         virtual void initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax);
         virtual status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                               int index,
+                                              bool muted,
                                               audio_devices_t device);
         virtual status_t getStreamVolumeIndex(audio_stream_type_t stream,
                                               int *index,
@@ -176,6 +178,7 @@
 
         virtual status_t setVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                      int index,
+                                                     bool muted,
                                                      audio_devices_t device);
         virtual status_t getVolumeIndexForAttributes(const audio_attributes_t &attr,
                                                      int &index,
@@ -185,6 +188,7 @@
         virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
         status_t setVolumeCurveIndex(int index,
+                                     bool muted,
                                      audio_devices_t device,
                                      IVolumeCurves &volumeCurves);
 
@@ -650,7 +654,8 @@
                              DeviceTypeSet deviceTypes = DeviceTypeSet());
 
         /**
-         * @brief setVolumeSourceMute Mute or unmute the volume source on the specified output
+         * @brief setVolumeSourceMutedInternally Mute or unmute the volume source on the specified
+         * output
          * @param volumeSource to be muted/unmute (may host legacy streams or by extension set of
          * audio attributes)
          * @param on true to mute, false to umute
@@ -658,11 +663,11 @@
          * @param delayMs
          * @param device
          */
-        void setVolumeSourceMute(VolumeSource volumeSource,
-                                 bool on,
-                                 const sp<AudioOutputDescriptor>& outputDesc,
-                                 int delayMs = 0,
-                                 DeviceTypeSet deviceTypes = DeviceTypeSet());
+        void setVolumeSourceMutedInternally(VolumeSource volumeSource,
+                                            bool on,
+                                            const sp<AudioOutputDescriptor>& outputDesc,
+                                            int delayMs = 0,
+                                            DeviceTypeSet deviceTypes = DeviceTypeSet());
 
         audio_mode_t getPhoneState();
 
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 363dfa7..3ed247b 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -186,21 +186,19 @@
 }
 
 status_t AudioPolicyService::AudioPolicyClient::setStreamVolume(audio_stream_type_t stream,
-                     float volume, audio_io_handle_t output,
-                     int delay_ms)
+                     float volume, bool muted, audio_io_handle_t output, int delay_ms)
 {
-    return mAudioPolicyService->setStreamVolume(stream, volume, output,
-                                               delay_ms);
+    return mAudioPolicyService->setStreamVolume(stream, volume, muted, output, delay_ms);
 }
 
 status_t AudioPolicyService::AudioPolicyClient::setPortsVolume(
-        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
-        int delayMs)
+        const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+        audio_io_handle_t output, int delayMs)
 {
     if (ports.empty()) {
         return NO_ERROR;
     }
-    return mAudioPolicyService->setPortsVolume(ports, volume, output, delayMs);
+    return mAudioPolicyService->setPortsVolume(ports, volume, muted, output, delayMs);
 }
 
 void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 24ab6a1..8fbe1cc 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -430,6 +430,7 @@
     bool isSpatialized = false;
     bool isBitPerfect = false;
     float volume;
+    bool muted;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
                                                             attributionSource,
@@ -439,7 +440,8 @@
                                                             &outputType,
                                                             &isSpatialized,
                                                             &isBitPerfect,
-                                                            &volume);
+                                                            &volume,
+                                                            &muted);
 
     // FIXME: Introduce a way to check for the the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -504,6 +506,7 @@
         _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_attributes_t_AudioAttributes(attr));
         _aidl_return->volume = volume;
+        _aidl_return->muted = muted;
     } else {
         _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
@@ -1170,7 +1173,7 @@
 
 Status AudioPolicyService::setStreamVolumeIndex(AudioStreamType streamAidl,
                                                 const AudioDeviceDescription& deviceAidl,
-                                                int32_t indexAidl) {
+                                                int32_t indexAidl, bool muted) {
     audio_stream_type_t stream = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioStreamType_audio_stream_type_t(streamAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
@@ -1192,6 +1195,7 @@
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->setStreamVolumeIndex(stream,
                                                                              index,
+                                                                             muted,
                                                                              device));
 }
 
@@ -1220,7 +1224,7 @@
 
 Status AudioPolicyService::setVolumeIndexForAttributes(
         const media::audio::common::AudioAttributes& attrAidl,
-        const AudioDeviceDescription& deviceAidl, int32_t indexAidl) {
+        const AudioDeviceDescription& deviceAidl, int32_t indexAidl, bool muted) {
     audio_attributes_t attributes = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioAttributes_audio_attributes_t(attrAidl));
     int index = VALUE_OR_RETURN_BINDER_STATUS(convertIntegral<int>(indexAidl));
@@ -1240,7 +1244,7 @@
     audio_utils::lock_guard _l(mMutex);
     AutoCallerClear acc;
     return binderStatusFromStatusT(
-            mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, device));
+            mAudioPolicyManager->setVolumeIndexForAttributes(attributes, index, muted, device));
 }
 
 Status AudioPolicyService::getVolumeIndexForAttributes(
@@ -1687,6 +1691,19 @@
     return Status::ok();
 }
 
+template <typename Port>
+void anonymizePortBluetoothAddress(Port& port) {
+    if (port.type != AUDIO_PORT_TYPE_DEVICE) {
+        return;
+    }
+    if (!(audio_is_a2dp_device(port.ext.device.type)
+            || audio_is_ble_device(port.ext.device.type)
+            || audio_is_bluetooth_sco_device(port.ext.device.type)
+            || audio_is_hearing_aid_out_device(port.ext.device.type))) {
+        return;
+    }
+    anonymizeBluetoothAddress(port.ext.device.address);
+}
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
@@ -1705,14 +1722,27 @@
     std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
     unsigned int generation;
 
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
+    const AttributionSourceState attributionSource = getCallingAttributionSource();
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
-    numPortsReq = std::min(numPortsReq, num_ports);
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->listAudioPorts makes a deep copy of port structs into ports
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+                mAudioPolicyManager->listAudioPorts(
+                        role, type, &num_ports, ports.get(), &generation)));
+        numPortsReq = std::min(numPortsReq, num_ports);
+    }
+
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+        for (size_t i = 0; i < numPortsReq; ++i) {
+            anonymizePortBluetoothAddress(ports[i]);
+        }
+    }
+
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
                          legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1735,12 +1765,24 @@
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
+
+    const AttributionSourceState attributionSource = getCallingAttributionSource();
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->getAudioPort makes a deep copy of the port struct into port
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+    }
+
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+        anonymizePortBluetoothAddress(port);
+    }
+
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
     return Status::ok();
 }
@@ -1802,14 +1844,32 @@
     std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
     unsigned int generation;
 
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
+    const AttributionSourceState attributionSource = getCallingAttributionSource();
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
-    numPatchesReq = std::min(numPatchesReq, num_patches);
+
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->listAudioPatches makes a deep copy of patches structs into patches
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+                mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+        numPatchesReq = std::min(numPatchesReq, num_patches);
+    }
+
+    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
+        for (size_t i = 0; i < numPatchesReq; ++i) {
+            for (size_t j = 0; j < patches[i].num_sources; ++j) {
+                anonymizePortBluetoothAddress(patches[i].sources[j]);
+            }
+            for (size_t j = 0; j < patches[i].num_sinks; ++j) {
+                anonymizePortBluetoothAddress(patches[i].sinks[j]);
+            }
+        }
+    }
+
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(patches.get(), patches.get() + numPatchesReq,
                          std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
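
The listAudioPorts, getAudioPort and listAudioPatches hunks above all follow the same shape:
deep-copy the data while holding mMutex, release the lock, then anonymize Bluetooth addresses for
callers that must not see them. A simplified, self-contained sketch of that copy-under-lock /
post-process-outside-lock pattern follows; Registry, listPortsForCaller and anonymizeAddress are
hypothetical stand-ins, not the actual AudioPolicyService types.

#include <cstdio>
#include <mutex>
#include <string>
#include <utility>
#include <vector>

struct Port { std::string btAddress; };

// Stand-in for the policy manager: the getter deep-copies under the lock so the
// result can be post-processed after the lock is released.
class Registry {
public:
    void addPort(Port p) {
        std::lock_guard<std::mutex> lock(mMutex);
        mPorts.push_back(std::move(p));
    }
    std::vector<Port> snapshotPorts() const {
        std::lock_guard<std::mutex> lock(mMutex);
        return mPorts;  // deep copy, safe to use once the lock is gone
    }
private:
    mutable std::mutex mMutex;
    std::vector<Port> mPorts;
};

// Placeholder for the real anonymizeBluetoothAddress(): mask all but the tail.
void anonymizeAddress(std::string& address) {
    for (size_t i = 0; i + 5 < address.size(); ++i) address[i] = 'X';
}

std::vector<Port> listPortsForCaller(const Registry& registry, bool mustAnonymize) {
    std::vector<Port> ports = registry.snapshotPorts();
    if (mustAnonymize) {  // done outside the registry lock, as in the hunks above
        for (Port& p : ports) anonymizeAddress(p.btAddress);
    }
    return ports;
}

int main() {
    Registry registry;
    registry.addPort({"AA:BB:CC:DD:EE:FF"});
    for (const Port& p : listPortsForCaller(registry, /*mustAnonymize=*/true)) {
        std::printf("%s\n", p.btAddress.c_str());
    }
    return 0;
}
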
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 7b7275e..290a036 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -1041,6 +1041,9 @@
             current->attributionSource.uid == topSensitiveActive->attributionSource.uid;
         bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
             current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
+        bool isActiveAssistant =
+                (useActiveAssistantList && mUidPolicy->isActiveAssistantUid(currentUid))
+                    || mUidPolicy->isAssistantUid(currentUid);
 
         // TODO: b/339112720
         // Refine this logic when we have the correct phone state owner UID. The current issue is
@@ -1069,7 +1072,7 @@
         const bool allowSensitiveCapture =
             !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
         bool allowCapture = false;
-        if (!isAssistantOnTop) {
+        if (!isAssistantOnTop || isActiveAssistant) {
             allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
                            allowSensitiveCapture && canCaptureIfInCallOrCommunication;
         } else {
@@ -1817,6 +1820,7 @@
                     ul.unlock();
                     command->mStatus = AudioSystem::setStreamVolume(data->mStream,
                                                                     data->mVolume,
+                                                                    data->mIsMuted,
                                                                     data->mIO);
                     ul.lock();
                     }break;
@@ -1827,6 +1831,7 @@
                     ul.unlock();
                     command->mStatus = AudioSystem::setPortsVolume(data->mPorts,
                                                                    data->mVolume,
+                                                                   data->mMuted,
                                                                    data->mIO);
                     ul.lock();
                     } break;
@@ -2147,6 +2152,7 @@
 
 status_t AudioPolicyService::AudioCommandThread::volumeCommand(audio_stream_type_t stream,
                                                                float volume,
+                                                               bool muted,
                                                                audio_io_handle_t output,
                                                                int delayMs)
 {
@@ -2155,6 +2161,7 @@
     sp<VolumeData> data = new VolumeData();
     data->mStream = stream;
     data->mVolume = volume;
+    data->mIsMuted = muted;
     data->mIO = output;
     command->mParam = data;
     command->mWaitStatus = true;
@@ -2164,14 +2171,15 @@
 }
 
 status_t AudioPolicyService::AudioCommandThread::volumePortsCommand(
-        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
-        int delayMs)
+        const std::vector<audio_port_handle_t> &ports, float volume, bool muted,
+        audio_io_handle_t output, int delayMs)
 {
     sp<AudioCommand> command = new AudioCommand();
     command->mCommand = SET_PORTS_VOLUME;
     sp<VolumePortsData> data = new VolumePortsData();
     data->mPorts = ports;
     data->mVolume = volume;
+    data->mMuted = muted;
     data->mIO = output;
     command->mParam = data;
     command->mWaitStatus = true;
@@ -2675,17 +2683,18 @@
 
 int AudioPolicyService::setStreamVolume(audio_stream_type_t stream,
                                         float volume,
+                                        bool muted,
                                         audio_io_handle_t output,
                                         int delayMs)
 {
-    return (int)mAudioCommandThread->volumeCommand(stream, volume,
+    return (int)mAudioCommandThread->volumeCommand(stream, volume, muted,
                                                    output, delayMs);
 }
 
 int AudioPolicyService::setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
-                                       audio_io_handle_t output, int delayMs)
+                                       bool muted, audio_io_handle_t output, int delayMs)
 {
-    return (int)mAudioCommandThread->volumePortsCommand(ports, volume, output, delayMs);
+    return (int)mAudioCommandThread->volumePortsCommand(ports, volume, muted, output, delayMs);
 }
 
 int AudioPolicyService::setVoiceVolume(float volume, int delayMs)
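
The volume paths above thread the new muted flag through the AudioCommandThread: it is stored on the queued command data (mIsMuted / mMuted) and forwarded when the worker executes the command, with the lock released around the downstream call just as in the threadLoop hunks. A self-contained sketch of that handoff, where VolumeCmd, applyStreamVolume and the ad-hoc queue are illustrative stand-ins rather than the real AudioPolicyService types:

#include <condition_variable>
#include <cstdio>
#include <deque>
#include <mutex>
#include <thread>

struct VolumeCmd {
    int stream;
    float volume;
    bool muted;   // the new flag carried alongside the level, like VolumeData::mIsMuted
    int output;
};

// Stand-in for the downstream AudioSystem call the worker forwards to.
void applyStreamVolume(const VolumeCmd& c) {
    std::printf("stream %d -> volume %.2f, muted %d, output %d\n",
                c.stream, c.volume, c.muted ? 1 : 0, c.output);
}

int main() {
    std::deque<VolumeCmd> queue;
    std::mutex m;
    std::condition_variable cv;
    bool done = false;

    std::thread worker([&] {
        std::unique_lock<std::mutex> lk(m);
        while (!done || !queue.empty()) {
            cv.wait(lk, [&] { return done || !queue.empty(); });
            while (!queue.empty()) {
                VolumeCmd cmd = queue.front();
                queue.pop_front();
                lk.unlock();               // mirrors ul.unlock() around the downstream call
                applyStreamVolume(cmd);
                lk.lock();
            }
        }
    });

    {
        std::lock_guard<std::mutex> lg(m);
        queue.push_back({/*stream=*/3, /*volume=*/0.5f, /*muted=*/true, /*output=*/13});
        done = true;  // let the worker exit once the queue drains
    }
    cv.notify_one();
    worker.join();
    return 0;
}
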
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index c47b5e9..2ce82c0 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -136,13 +136,13 @@
                                     int32_t indexMax) override;
     binder::Status setStreamVolumeIndex(AudioStreamType stream,
                                         const AudioDeviceDescription& device,
-                                        int32_t index) override;
+                                        int32_t index, bool muted) override;
     binder::Status getStreamVolumeIndex(AudioStreamType stream,
                                         const AudioDeviceDescription& device,
                                         int32_t* _aidl_return) override;
     binder::Status setVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                const AudioDeviceDescription& device,
-                                               int32_t index) override;
+                                               int32_t index, bool muted) override;
     binder::Status getVolumeIndexForAttributes(const media::audio::common::AudioAttributes& attr,
                                                const AudioDeviceDescription& device,
                                                int32_t* _aidl_return) override;
@@ -353,6 +353,7 @@
 
     virtual status_t setStreamVolume(audio_stream_type_t stream,
                                      float volume,
+                                     bool muted,
                                      audio_io_handle_t output,
                                      int delayMs = 0);
 
@@ -364,12 +365,13 @@
      *
      * @param ports to consider
      * @param volume to set
+     * @param muted to set
      * @param output to consider
      * @param delayMs to use
      * @return NO_ERROR if successful
      */
     virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
-            audio_io_handle_t output, int delayMs = 0);
+            bool muted, audio_io_handle_t output, int delayMs = 0);
     virtual status_t setVoiceVolume(float volume, int delayMs = 0);
 
     void doOnNewAudioModulesAvailable();
@@ -625,10 +627,10 @@
         virtual     bool        threadLoop();
 
                     void        exit();
-                    status_t    volumeCommand(audio_stream_type_t stream, float volume,
+                    status_t    volumeCommand(audio_stream_type_t stream, float volume, bool muted,
                                             audio_io_handle_t output, int delayMs = 0);
                     status_t    volumePortsCommand(const std::vector<audio_port_handle_t> &ports,
-                            float volume, audio_io_handle_t output, int delayMs = 0);
+                            float volume, bool muted, audio_io_handle_t output, int delayMs = 0);
                     status_t    parametersCommand(audio_io_handle_t ioHandle,
                                             const char *keyValuePairs, int delayMs = 0);
                     status_t    voiceVolumeCommand(float volume, int delayMs = 0);
@@ -700,6 +702,7 @@
         public:
             audio_stream_type_t mStream;
             float mVolume;
+            bool mIsMuted;
             audio_io_handle_t mIO;
         };
 
@@ -707,13 +710,15 @@
         public:
             std::vector<audio_port_handle_t> mPorts;
             float mVolume;
+            bool mMuted;
             audio_io_handle_t mIO;
             std::string dumpPorts() {
-                return std::string("volume ") + std::to_string(mVolume) + " on IO " +
-                        std::to_string(mIO) + " and ports " +
-                        std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
-                                       [] (const std::string& ls, int rs) {
-                                return ls + std::to_string(rs) + " "; });
+                return std::string("volume ") + std::to_string(mVolume) + std::string(" muted ") +
+                       std::to_string(mMuted) + " on IO " + std::to_string(mIO) + " and ports " +
+                       std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
+                                       [](const std::string &ls, int rs) {
+                                           return ls + std::to_string(rs) + " ";
+                                       });
             }
         };
 
@@ -854,9 +859,10 @@
         // misc control functions
         //
 
-        // set a stream volume for a particular output. For the same user setting, a given stream type can have different volumes
-        // for each output (destination device) it is attached to.
-        virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
+        // set a stream volume for a particular output. For the same user setting, a given stream
+        // type can have different volumes for each output (destination device) it is attached to.
+        virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, bool muted,
+                audio_io_handle_t output, int delayMs = 0);
         /**
          * Set a volume on port(s) for a particular output. For the same user setting, a volume
          * group (and associated given port of the client's track) can have different volumes for
@@ -864,12 +870,13 @@
          *
          * @param ports to consider
          * @param volume to set
+         * @param muted to set
          * @param output to consider
          * @param delayMs to use
          * @return NO_ERROR if successful
          */
         status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
-                audio_io_handle_t output, int delayMs = 0) override;
+                bool muted, audio_io_handle_t output, int delayMs = 0) override;
 
         // function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
         virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 154b063..182dc61 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -29,7 +29,6 @@
         "libbase",
         "libbinder",
         "libcutils",
-        "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia_helper",
@@ -41,6 +40,7 @@
     static_libs: [
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audio-aconfig-cc",
         "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
         "libflagtest",
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index 9ddfd6c..5290da2 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -56,11 +56,13 @@
     status_t closeInput(audio_io_handle_t /*input*/) override { return NO_INIT; }
     status_t setStreamVolume(audio_stream_type_t /*stream*/,
                              float /*volume*/,
+                             bool /*muted*/,
                              audio_io_handle_t /*output*/,
                              int /*delayMs*/) override { return NO_INIT; }
 
-    status_t setPortsVolume(const std::vector<audio_port_handle_t>& /*ports*/, float /*volume*/,
-            audio_io_handle_t /*output*/, int /*delayMs*/) override { return NO_INIT; }
+    status_t setPortsVolume(const std::vector<audio_port_handle_t> & /*ports*/, float /*volume*/,
+                            bool /*muted*/, audio_io_handle_t /*output*/,
+                            int /*delayMs*/) override { return NO_INIT; }
 
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 39f9b8a..d83a277 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -310,11 +310,13 @@
     bool isSpatialized;
     bool isBitPerfectInternal;
     float volume;
+    bool muted;
     AttributionSourceState attributionSource = createAttributionSourceState(uid);
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, output, session, &stream, attributionSource, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume));
+                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume,
+                    &muted));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
@@ -2107,6 +2109,7 @@
     bool mIsSpatialized;
     bool mIsBitPerfect;
     float mVolume;
+    bool mMuted;
 };
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
@@ -2125,7 +2128,8 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+                                         &mMuted));
 }
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2144,7 +2148,8 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+                                         &mMuted));
 }
 
 TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2175,7 +2180,8 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+                                         &mMuted));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     auto outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2191,7 +2197,8 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+                                         &mMuted));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2220,7 +2227,8 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume,
+                                         &mMuted));
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -3978,11 +3986,12 @@
     bool isSpatialized;
     bool isBitPerfect;
     float volume;
+    bool muted;
     EXPECT_EQ(expected,
               mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
                                          &stream, attributionSource, &config, &flags,
                                          &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
-                                         &isSpatialized, &isBitPerfect, &volume));
+                                         &isSpatialized, &isBitPerfect, &volume, &muted));
 }
 
 class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index fa5eb6f..17ec41e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -95,6 +95,7 @@
     const char* kProcessInfoServiceName = "processinfo";
     const char* kVirtualDeviceBackCameraId = "0";
     const char* kVirtualDeviceFrontCameraId = "1";
+    const char* kUnknownPackageName = "<unknown>";
 
     int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
         if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
@@ -1605,14 +1606,18 @@
     int callingPid = getCallingPid();
     logConnectionAttempt(callingPid, kServiceName, cameraIdStr, API_1);
 
-    if (!(ret = connectHelper<ICameraClient,Client>(
-            sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
-            kServiceName, /*systemNativeClient*/ false, {}, uid, callingPid,
-            API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-            /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-            /*forceSlowJpegMode*/false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp)
-            ).isOk()) {
+    AttributionSourceState clientAttribution =
+            buildAttributionSource(callingPid, uid, kServiceName, kDefaultDeviceId);
+
+    if (!(ret = connectHelper<ICameraClient, Client>(
+                  sp<ICameraClient>{nullptr}, cameraIdStr, cameraId, clientAttribution,
+                  /*systemNativeClient*/ false, API_1, /*shimUpdateOnly*/ true,
+                  /*oomScoreOffset*/ 0,
+                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+                  /*rotationOverride*/
+                  hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                  /*forceSlowJpegMode*/ false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp))
+                 .isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
     return ret;
@@ -1681,14 +1686,11 @@
 }
 
 Status CameraService::validateConnectLocked(const std::string& cameraId,
-        const std::string& clientName8, int clientUid, int clientPid) const {
-
+                                            const AttributionSourceState& clientAttribution) const {
 #ifdef __BRILLO__
-    UNUSED(clientName8);
-    UNUSED(clientUid);
-    UNUSED(clientPid);
+    UNUSED(clientAttribution);
 #else
-    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid);
+    Status allowed = validateClientPermissionsLocked(cameraId, clientAttribution);
     if (!allowed.isOk()) {
         return allowed;
     }
@@ -1725,11 +1727,15 @@
     return Status::ok();
 }
 
-Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
-        const std::string& clientName, int clientUid, int clientPid) const {
+Status CameraService::validateClientPermissionsLocked(
+        const std::string& cameraId, const AttributionSourceState& clientAttribution) const {
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
 
+    int clientPid = clientAttribution.pid;
+    int clientUid = clientAttribution.uid;
+    const std::string clientName = clientAttribution.packageName.value_or(kUnknownPackageName);
+
     if (shouldRejectSystemCameraConnection(cameraId)) {
         ALOGW("Attempting to connect to system-only camera id %s, connection rejected",
                 cameraId.c_str());
@@ -1745,12 +1751,14 @@
 
     // Get the device id that owns this camera.
     auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+    AttributionSourceState clientAttributionWithDeviceId = clientAttribution;
+    clientAttributionWithDeviceId.deviceId = deviceId;
 
     // If it's not calling from cameraserver, check the permission if the
     // device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
     // android.permission.SYSTEM_CAMERA for system only camera devices).
     bool checkPermissionForCamera =
-            hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName, deviceId);
+            hasPermissionsForCamera(cameraId, clientAttributionWithDeviceId);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
@@ -2122,7 +2130,7 @@
     ret = resolveAttributionSource(resolvedClientAttribution, __FUNCTION__, cameraIdStr);
     if (!ret.isOk()) {
         logRejected(cameraIdStr, getCallingPid(),
-                    clientAttribution.packageName.value_or("<unknown>"),
+                    clientAttribution.packageName.value_or(kUnknownPackageName),
                     toStdString(ret.toString8()));
         return ret;
     }
@@ -2134,15 +2142,15 @@
     logConnectionAttempt(clientPid, clientPackageName, cameraIdStr, API_1);
 
     sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
-            clientPackageName, /*systemNativeClient*/ false, {},
-            clientUid, clientPid, API_1,
-            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            rotationOverride, forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/client);
+    ret = connectHelper<ICameraClient, Client>(
+            cameraClient, cameraIdStr, api1CameraId, resolvedClientAttribution,
+            /*systemNativeClient*/ false, API_1,
+            /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion, rotationOverride,
+            forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/ client);
 
     if (!ret.isOk()) {
         logRejected(cameraIdStr, getCallingPid(),
-                    clientAttribution.packageName.value_or("<unknown>"),
+                    clientAttribution.packageName.value_or(kUnknownPackageName),
                     toStdString(ret.toString8()));
         return ret;
     }
@@ -2298,9 +2306,8 @@
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks, CameraDeviceClient>(
-            cameraCb, cameraId, /*api1CameraId*/ -1, clientPackageName, systemNativeClient,
-            resolvedClientAttribution.attributionTag, clientUid, clientPid, API_2,
-            /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+            cameraCb, cameraId, /*api1CameraId*/ -1, resolvedClientAttribution, systemNativeClient,
+            API_2, /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
             /*forceSlowJpegMode*/ false, unresolvedCameraId, isNonSystemNdk, /*out*/ client);
 
     if (!ret.isOk()) {
@@ -2373,13 +2380,15 @@
           static_cast<int>(effectiveApiLevel));
 }
 
-template<class CALLBACK, class CLIENT>
+template <class CALLBACK, class CLIENT>
 Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-        int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
-        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        int rotationOverride, bool forceSlowJpegMode,
-        const std::string& originalCameraId, bool isNonSystemNdk, /*out*/sp<CLIENT>& device) {
+                                    int api1CameraId,
+                                    const AttributionSourceState& clientAttribution,
+                                    bool systemNativeClient, apiLevel effectiveApiLevel,
+                                    bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+                                    int rotationOverride, bool forceSlowJpegMode,
+                                    const std::string& originalCameraId, bool isNonSystemNdk,
+                                    /*out*/ sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     nsecs_t openTimeNs = systemTime();
@@ -2388,22 +2397,25 @@
     int facing = -1;
     int orientation = 0;
 
+    const std::string clientPackageName =
+            clientAttribution.packageName.value_or(kUnknownPackageName);
+
     {
         // Acquire mServiceLock and prevent other clients from connecting
         std::unique_ptr<AutoConditionLock> lock =
                 AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS);
 
         if (lock == nullptr) {
-            ALOGE("CameraService::connect (PID %d) rejected (too many other clients connecting)."
-                    , clientPid);
-            return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
+            ALOGE("CameraService::connect (PID %d) rejected (too many other clients connecting).",
+                  clientAttribution.pid);
+            return STATUS_ERROR_FMT(
+                    ERROR_MAX_CAMERAS_IN_USE,
                     "Cannot open camera %s for \"%s\" (PID %d): Too many other clients connecting",
-                    cameraId.c_str(), clientPackageName.c_str(), clientPid);
+                    cameraId.c_str(), clientPackageName.c_str(), clientAttribution.pid);
         }
 
         // Enforce client permissions and do basic validity checks
-        if (!(ret = validateConnectLocked(cameraId, clientPackageName,
-                /*inout*/clientUid, /*inout*/clientPid)).isOk()) {
+        if (!(ret = validateConnectLocked(cameraId, clientAttribution)).isOk()) {
             return ret;
         }
 
@@ -2420,9 +2432,12 @@
 
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
-        if ((err = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
-                IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
-                systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
+        if ((err = handleEvictionsLocked(
+                     cameraId, clientAttribution.pid, effectiveApiLevel,
+                     IInterface::asBinder(cameraCb),
+                     clientAttribution.packageName.value_or(kUnknownPackageName), oomScoreOffset,
+                     systemNativeClient, /*out*/ &clientTmp,
+                     /*out*/ &partial)) != NO_ERROR) {
             switch (err) {
                 case -ENODEV:
                     return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
@@ -2468,12 +2483,13 @@
 
         // Only use passed in clientPid to check permission. Use calling PID as the client PID
         // that's connected to camera service directly.
-        if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, api1CameraId, facing,
-                orientation, getCallingPid(), clientUid, getpid(),
-                deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                rotationOverride, forceSlowJpegMode, originalCameraId,
-                /*out*/&tmp)).isOk()) {
+        if (!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
+                               clientAttribution.attributionTag, cameraId, api1CameraId, facing,
+                               orientation, getCallingPid(), clientAttribution.uid, getpid(),
+                               deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
+                               rotationOverride, forceSlowJpegMode, originalCameraId,
+                               /*out*/ &tmp))
+                     .isOk()) {
             return ret;
         }
         client = static_cast<CLIENT*>(tmp.get());
@@ -2574,8 +2590,9 @@
                 client->setRotateAndCropOverride(rotateAndCropMode);
             } else {
                 client->setRotateAndCropOverride(
-                    mCameraServiceProxyWrapper->getRotateAndCropOverride(
-                        clientPackageName, facing, multiuser_get_user_id(clientUid)));
+                        mCameraServiceProxyWrapper->getRotateAndCropOverride(
+                                clientPackageName, facing,
+                                multiuser_get_user_id(clientAttribution.uid)));
             }
         }
 
@@ -2600,8 +2617,9 @@
         bool isCameraPrivacyEnabled;
         if (flags::camera_privacy_allowlist()) {
             // Set camera muting behavior.
-            isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
-                    toString16(client->getPackageName()), cameraId, clientPid, clientUid);
+            isCameraPrivacyEnabled =
+                    this->isCameraPrivacyEnabled(toString16(client->getPackageName()), cameraId,
+                                                 clientAttribution.pid, clientAttribution.uid);
         } else {
             isCameraPrivacyEnabled =
                     mSensorPrivacyPolicy->isCameraPrivacyEnabled();
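
connectHelper, validateConnectLocked and validateClientPermissionsLocked now receive one AttributionSourceState instead of separate packageName/uid/pid arguments, falling back to kUnknownPackageName when no package name was supplied. A small sketch of that consolidation, assuming a simplified SimpleAttribution stand-in for the real AIDL parcelable:

#include <iostream>
#include <optional>
#include <string>

// Simplified stand-in for AttributionSourceState: only the fields used here.
struct SimpleAttribution {
    int uid = -1;
    int pid = -1;
    std::optional<std::string> packageName;
    int deviceId = 0;
};

SimpleAttribution buildAttribution(int pid, int uid, const std::string& package, int deviceId) {
    SimpleAttribution attr;
    attr.pid = pid;
    attr.uid = uid;
    attr.packageName = package;
    attr.deviceId = deviceId;
    return attr;
}

// Callees unpack what they need and fall back to a sentinel name, mirroring
// the kUnknownPackageName default in validateClientPermissionsLocked.
void validateConnect(const SimpleAttribution& attr) {
    const std::string clientName = attr.packageName.value_or("<unknown>");
    std::cout << "validating " << clientName << " (pid " << attr.pid
              << ", uid " << attr.uid << ", device " << attr.deviceId << ")\n";
}

int main() {
    validateConnect(buildAttribution(/*pid=*/1234, /*uid=*/10001, "com.example.cam", /*deviceId=*/0));
    return 0;
}
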
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 5eb2536..07c9d00 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -927,12 +927,10 @@
     void removeStates(const std::string& id);
 
     // Check if we can connect, before we acquire the service lock.
-    // If clientPid/clientUid are USE_CALLING_PID/USE_CALLING_UID, they will be overwritten with
-    // the calling pid/uid.
-    binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
-            int clientUid, int clientPid) const;
-    binder::Status validateClientPermissionsLocked(const std::string& cameraId,
-            const std::string& clientName, int clientUid, int clientPid) const;
+    binder::Status validateConnectLocked(const std::string& cameraId,
+                                         const AttributionSourceState& clientAttribution) const;
+    binder::Status validateClientPermissionsLocked(
+            const std::string& cameraId, const AttributionSourceState& clientAttribution) const;
 
     void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
         const std::string& cameraId, apiLevel effectiveApiLevel) const;
@@ -974,14 +972,14 @@
     void filterAPI1SystemCameraLocked(const std::vector<std::string> &normalDeviceIds);
 
     // Single implementation shared between the various connect calls
-    template<class CALLBACK, class CLIENT>
+    template <class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-            int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
-            const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
-            apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            int rotationOverride, bool forceSlowJpegMode,
-            const std::string& originalCameraId, bool isNonSystemNdk,
-            /*out*/sp<CLIENT>& device);
+                                 int api1CameraId, const AttributionSourceState& clientAttribution,
+                                 bool systemNativeClient, apiLevel effectiveApiLevel,
+                                 bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
+                                 int rotationOverride, bool forceSlowJpegMode,
+                                 const std::string& originalCameraId, bool isNonSystemNdk,
+                                 /*out*/ sp<CLIENT>& device);
 
     // Lock guarding camera service state
     Mutex               mServiceLock;
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index 37903e1..b213218 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -32,7 +32,32 @@
 #include <hwbinder/IPCThreadState.h>
 
 namespace {
+
+using android::content::AttributionSourceState;
+
 static const std::string kPermissionServiceName = "permission";
+
+static std::string getAttributionString(const AttributionSourceState& attributionSource) {
+    std::ostringstream ret;
+    const AttributionSourceState* current = &attributionSource;
+    while (current != nullptr) {
+        if (current != &attributionSource) {
+            ret << ", ";
+        }
+
+        ret << "[uid " << current->uid << ", pid " << current->pid;
+        ret << ", packageName \"" << current->packageName.value_or("<unknown>");
+        ret << "\"]";
+
+        if (!current->next.empty()) {
+            current = &current->next[0];
+        } else {
+            current = nullptr;
+        }
+    }
+    return ret.str();
+}
+
 } // namespace
 
 namespace android {
@@ -111,13 +136,22 @@
         const std::string& cameraId, const std::string& permission,
         const AttributionSourceState& attributionSource, const std::string& message,
         int32_t attributedOpCode) {
-    if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
+    AttributionSourceState clientAttribution = attributionSource;
+    if (!flags::check_full_attribution_source_chain() && !clientAttribution.next.empty()) {
+        clientAttribution.next.clear();
+    }
+
+    if (checkAutomotivePrivilegedClient(cameraId, clientAttribution)) {
         return true;
     }
 
-    return mPermissionChecker->checkPermissionForPreflight(
-                   toString16(permission), attributionSource, toString16(message),
-                   attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+    PermissionChecker::PermissionResult result = mPermissionChecker->checkPermissionForPreflight(
+            toString16(permission), clientAttribution, toString16(message), attributedOpCode);
+    if (result == PermissionChecker::PERMISSION_HARD_DENIED) {
+        ALOGE("%s: Permission denied for client attribution %s", __FUNCTION__,
+              getAttributionString(clientAttribution).c_str());
+    }
+    return result != PermissionChecker::PERMISSION_HARD_DENIED;
 }
 
 // Can camera service trust the caller based on the calling UID?
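
getAttributionString walks the AttributionSourceState::next chain so a hard permission denial logs every link, and the new check_full_attribution_source_chain flag decides whether that chain is also handed to PermissionChecker. The same traversal as a standalone sketch, with SimpleAttribution as a reduced stand-in for the parcelable:

#include <iostream>
#include <optional>
#include <sstream>
#include <string>
#include <vector>

// Reduced stand-in for android::content::AttributionSourceState; only the
// fields the traversal reads are modeled.
struct SimpleAttribution {
    int uid = -1;
    int pid = -1;
    std::optional<std::string> packageName;
    std::vector<SimpleAttribution> next;  // only next[0] is followed, as in the diff
};

std::string attributionChainToString(const SimpleAttribution& head) {
    std::ostringstream out;
    for (const SimpleAttribution* cur = &head; cur != nullptr;
         cur = cur->next.empty() ? nullptr : &cur->next[0]) {
        if (cur != &head) out << ", ";
        out << "[uid " << cur->uid << ", pid " << cur->pid
            << ", packageName \"" << cur->packageName.value_or("<unknown>") << "\"]";
    }
    return out.str();
}

int main() {
    SimpleAttribution proxied;
    proxied.uid = 10001;
    proxied.pid = 4321;
    proxied.packageName = "com.example.app";

    SimpleAttribution caller;
    caller.uid = 1000;
    caller.pid = 1234;
    caller.packageName = "system_server";
    caller.next.push_back(proxied);

    // Prints both links: the direct caller first, then the app it acts for.
    std::cout << attributionChainToString(caller) << '\n';
    return 0;
}
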
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 22abccc..9ed7fa2 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -222,8 +222,9 @@
 
     bool hasPermissionsForCamera(int callingPid, int callingUid, const std::string& packageName,
                                  int32_t deviceId) const {
-        return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName,
-                                       deviceId);
+        auto attributionSource =
+                buildAttributionSource(callingPid, callingUid, packageName, deviceId);
+        return hasPermissionsForCamera(std::string(), attributionSource);
     }
 
     bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
@@ -232,11 +233,9 @@
         return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
     }
 
-    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
-                                 const std::string& packageName, int32_t deviceId) const {
-        auto attributionSource =
-                buildAttributionSource(callingPid, callingUid, packageName, deviceId);
-        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    bool hasPermissionsForCamera(const std::string& cameraId,
+                                 const AttributionSourceState& clientAttribution) const {
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, clientAttribution);
     }
 
     bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index bf4a45d..becba90 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -557,8 +557,9 @@
 std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
     std::chrono::nanoseconds timeSinceLastFrame) {
   std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
-  if (surfaceTimestamp.count() < 0) {
-    uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
+  uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
+  if (surfaceTimestamp.count() < 0 ||
+      surfaceTimestamp.count() == lastSurfaceTimestamp) {
     if (lastSurfaceTimestamp > 0) {
       // The timestamps were provided by the producer but we are
       // repeating the last frame, so we increase the previous timestamp by
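
The widened condition treats a surface timestamp equal to the previous one the same as a missing timestamp, so a repeated frame gets a synthesized timestamp instead of a duplicate. A rough sketch of that decision under assumptions: the timeSinceLastFrame increment and the fall-back to the current time below are illustrative guesses, since those branches sit outside the shown hunk:

#include <chrono>
#include <cstdint>
#include <iostream>

using std::chrono::nanoseconds;

nanoseconds chooseTimestamp(nanoseconds surfaceTimestamp, uint64_t lastSurfaceTimestampNs,
                            nanoseconds timeSinceLastFrame, nanoseconds now) {
    if (surfaceTimestamp.count() < 0 ||
        static_cast<uint64_t>(surfaceTimestamp.count()) == lastSurfaceTimestampNs) {
        if (lastSurfaceTimestampNs > 0) {
            // Producer supplied timestamps before, but this frame repeats the last
            // one: advance the previous timestamp instead of emitting a duplicate.
            return nanoseconds(lastSurfaceTimestampNs) + timeSinceLastFrame;
        }
        return now;  // assumed fallback when no producer timestamp was ever seen
    }
    return surfaceTimestamp;  // fresh timestamp from the producer, use as-is
}

int main() {
    // Repeated producer timestamp (1000 ns twice): a new value is synthesized.
    std::cout << chooseTimestamp(nanoseconds(1000), 1000, nanoseconds(33), nanoseconds(5000)).count()
              << '\n';  // 1033
    return 0;
}
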
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index a35eea1..1fb4e84 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -221,8 +221,8 @@
   std::mutex mLock;
   std::deque<std::unique_ptr<ProcessCaptureRequestTask>> mQueue GUARDED_BY(mLock);
   std::condition_variable mCondVar;
-  volatile bool mTextureUpdateRequested GUARDED_BY(mLock);
-  volatile bool mPendingExit GUARDED_BY(mLock);
+  volatile bool GUARDED_BY(mLock) mTextureUpdateRequested = false;
+  volatile bool GUARDED_BY(mLock) mPendingExit = false;
 
   // Acquisition timestamp of last frame.
   std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
diff --git a/services/tuner/TunerHelper.cpp b/services/tuner/TunerHelper.cpp
index a03386f..8357a9e 100644
--- a/services/tuner/TunerHelper.cpp
+++ b/services/tuner/TunerHelper.cpp
@@ -73,7 +73,7 @@
 
 // TODO: update Demux, Descrambler.
 void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
-                                       const vector<int32_t>& lnbHandles) {
+                                       const vector<int64_t>& lnbHandles) {
     ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
     shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
     if (tunerRM == nullptr) {
@@ -85,7 +85,7 @@
 }
 void TunerHelper::updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
                                        const vector<TunerDemuxInfo>& demuxInfos,
-                                       const vector<int32_t>& lnbHandles) {
+                                       const vector<int64_t>& lnbHandles) {
     ::ndk::SpAIBinder binder(AServiceManager_waitForService("tv_tuner_resource_mgr"));
     shared_ptr<ITunerResourceManager> tunerRM = ITunerResourceManager::fromBinder(binder);
     if (tunerRM == nullptr) {
@@ -101,13 +101,22 @@
 }
 
 // TODO: create a map between resource id and handles.
-int TunerHelper::getResourceIdFromHandle(int resourceHandle, int /*type*/) {
-    return (resourceHandle & 0x00ff0000) >> 16;
+int TunerHelper::getResourceIdFromHandle(long resourceHandle, int /*type*/) {
+    return (int)((resourceHandle >> RESOURCE_ID_SHIFT) & RESOURCE_ID_MASK);
 }
 
-int TunerHelper::getResourceHandleFromId(int id, int resourceType) {
+/**
+ * Generate a 64-bit resource handle for the given resourceType and id.
+ * Resource handle layout (64 bits, most to least significant):
+ *   8 bits  - resourceType
+ *   32 bits - id
+ *   24 bits - resourceRequestCount
+ */
+long TunerHelper::getResourceHandleFromId(int id, int resourceType) {
     // TODO: build up randomly generated id to handle mapping
-    return (resourceType & 0x000000ff) << 24 | (id << 16) | (sResourceRequestCount++ & 0xffff);
+    return static_cast<int64_t>(resourceType & RESOURCE_TYPE_MASK) << RESOURCE_TYPE_SHIFT |
+           static_cast<int64_t>(id & RESOURCE_ID_MASK) << RESOURCE_ID_SHIFT |
+           (sResourceRequestCount++ & RESOURCE_COUNT_MASK);
 }
 
 }  // namespace tuner
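
The new handle layout keeps the resource type in the top 8 bits, the full 32-bit id in the middle and a 24-bit request count at the bottom, so ids are no longer squeezed into the 8 bits the old 32-bit scheme allowed (type << 24 | id << 16 | 16-bit count). A quick worked example of packing and unpacking with the same shifts and masks as the TunerHelper.h hunk below; the main/asserts are just a demonstration:

#include <cassert>
#include <cstdint>

constexpr uint32_t RESOURCE_ID_SHIFT = 24;
constexpr uint32_t RESOURCE_TYPE_SHIFT = 56;
constexpr uint32_t RESOURCE_COUNT_MASK = 0xffffff;
constexpr uint32_t RESOURCE_ID_MASK = 0xffffffff;
constexpr uint32_t RESOURCE_TYPE_MASK = 0xff;

int64_t packHandle(int resourceType, int id, int requestCount) {
    return static_cast<int64_t>(resourceType & RESOURCE_TYPE_MASK) << RESOURCE_TYPE_SHIFT |
           static_cast<int64_t>(id & RESOURCE_ID_MASK) << RESOURCE_ID_SHIFT |
           (requestCount & RESOURCE_COUNT_MASK);
}

int unpackId(int64_t handle) {
    return static_cast<int>((handle >> RESOURCE_ID_SHIFT) & RESOURCE_ID_MASK);
}

int main() {
    // e.g. type 0, id 3, fifth request: the id survives the round trip, and ids
    // wider than 8 bits no longer collide with the type or the request count.
    int64_t handle = packHandle(/*resourceType=*/0, /*id=*/3, /*requestCount=*/5);
    assert(unpackId(handle) == 3);
    assert(unpackId(packHandle(1, 0x12345678, 0)) == 0x12345678);
    return 0;
}
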
diff --git a/services/tuner/TunerHelper.h b/services/tuner/TunerHelper.h
index 65a9b0b..74e1662 100644
--- a/services/tuner/TunerHelper.h
+++ b/services/tuner/TunerHelper.h
@@ -56,17 +56,23 @@
 
     // TODO: update Demux, Descrambler.
     static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
-                                     const vector<int32_t>& lnbHandles);
+                                     const vector<int64_t>& lnbHandles);
 
     static void updateTunerResources(const vector<TunerFrontendInfo>& feInfos,
                                      const vector<TunerDemuxInfo>& demuxInfos,
-                                     const vector<int32_t>& lnbHandles);
+                                     const vector<int64_t>& lnbHandles);
     // TODO: create a map between resource id and handles.
-    static int getResourceIdFromHandle(int resourceHandle, int type);
-    static int getResourceHandleFromId(int id, int resourceType);
+    static int getResourceIdFromHandle(long resourceHandle, int type);
+    static long getResourceHandleFromId(int id, int resourceType);
 
 private:
     static int32_t sResourceRequestCount;
+
+    static constexpr uint32_t RESOURCE_ID_SHIFT = 24;
+    static constexpr uint32_t RESOURCE_TYPE_SHIFT = 56;
+    static constexpr uint32_t RESOURCE_COUNT_MASK = 0xffffff;
+    static constexpr uint32_t RESOURCE_ID_MASK = 0xffffffff;
+    static constexpr uint32_t RESOURCE_TYPE_MASK = 0xff;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerService.cpp b/services/tuner/TunerService.cpp
index 9a1e8bb..8cf84e2 100644
--- a/services/tuner/TunerService.cpp
+++ b/services/tuner/TunerService.cpp
@@ -82,7 +82,7 @@
     return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
-::ndk::ScopedAStatus TunerService::openDemux(int32_t in_demuxHandle,
+::ndk::ScopedAStatus TunerService::openDemux(int64_t in_demuxHandle,
                                              shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
     shared_ptr<IDemux> demux;
@@ -116,7 +116,7 @@
     }
 }
 
-::ndk::ScopedAStatus TunerService::getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) {
+::ndk::ScopedAStatus TunerService::getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) {
     if (mTunerVersion <= TUNER_HAL_VERSION_2_0) {
         return ::ndk::ScopedAStatus::fromServiceSpecificError(
                 static_cast<int32_t>(Result::UNAVAILABLE));
@@ -169,7 +169,7 @@
     return mTuner->getFrontendInfo(id, _aidl_return);
 }
 
-::ndk::ScopedAStatus TunerService::openFrontend(int32_t frontendHandle,
+::ndk::ScopedAStatus TunerService::openFrontend(int64_t frontendHandle,
                                                 shared_ptr<ITunerFrontend>* _aidl_return) {
     int id = TunerHelper::getResourceIdFromHandle(frontendHandle, FRONTEND);
     shared_ptr<IFrontend> frontend;
@@ -181,7 +181,7 @@
     return status;
 }
 
-::ndk::ScopedAStatus TunerService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerService::openLnb(int64_t lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
     shared_ptr<ILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
     auto status = mTuner->openLnbById(id, &lnb);
@@ -204,7 +204,7 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-::ndk::ScopedAStatus TunerService::openDescrambler(int32_t /*descramblerHandle*/,
+::ndk::ScopedAStatus TunerService::openDescrambler(int64_t /*descramblerHandle*/,
                                                    shared_ptr<ITunerDescrambler>* _aidl_return) {
     shared_ptr<IDescrambler> descrambler;
     // int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -310,7 +310,7 @@
             continue;
         }
         TunerFrontendInfo tunerFrontendInfo{
-                .handle = TunerHelper::getResourceHandleFromId((int)ids[i], FRONTEND),
+                .handle = TunerHelper::getResourceHandleFromId(ids[i], FRONTEND),
                 .type = static_cast<int>(frontendInfo.type),
                 .exclusiveGroupId = frontendInfo.exclusiveGroupId,
         };
@@ -336,18 +336,16 @@
     for (int i = 0; i < ids.size(); i++) {
         DemuxInfo demuxInfo;
         mTuner->getDemuxInfo(ids[i], &demuxInfo);
-        TunerDemuxInfo tunerDemuxInfo{
-                .handle = TunerHelper::getResourceHandleFromId((int)ids[i], DEMUX),
-                .filterTypes = static_cast<int>(demuxInfo.filterTypes)
-        };
+        TunerDemuxInfo tunerDemuxInfo{.handle = TunerHelper::getResourceHandleFromId(ids[i], DEMUX),
+                                      .filterTypes = static_cast<int>(demuxInfo.filterTypes)};
         infos.push_back(tunerDemuxInfo);
     }
 
     return infos;
 }
 
-vector<int32_t> TunerService::getTRMLnbHandles() {
-    vector<int32_t> lnbHandles;
+vector<int64_t> TunerService::getTRMLnbHandles() {
+    vector<int64_t> lnbHandles;
     if (mTuner != nullptr) {
         vector<int32_t> lnbIds;
         auto res = mTuner->getLnbIds(&lnbIds);
diff --git a/services/tuner/TunerService.h b/services/tuner/TunerService.h
index 190ccd4..07b414e 100644
--- a/services/tuner/TunerService.h
+++ b/services/tuner/TunerService.h
@@ -61,20 +61,20 @@
     virtual ~TunerService();
 
     ::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
-    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
+    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendId,
                                          FrontendInfo* _aidl_return) override;
-    ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+    ::ndk::ScopedAStatus openFrontend(int64_t in_frontendHandle,
                                       shared_ptr<ITunerFrontend>* _aidl_return) override;
-    ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+    ::ndk::ScopedAStatus openLnb(int64_t in_lnbHandle,
                                  shared_ptr<ITunerLnb>* _aidl_return) override;
     ::ndk::ScopedAStatus openLnbByName(const string& in_lnbName,
                                        shared_ptr<ITunerLnb>* _aidl_return) override;
-    ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+    ::ndk::ScopedAStatus openDemux(int64_t in_demuxHandle,
                                    shared_ptr<ITunerDemux>* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
-    ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
-    ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+    ::ndk::ScopedAStatus openDescrambler(int64_t in_descramblerHandle,
                                          shared_ptr<ITunerDescrambler>* _aidl_return) override;
     ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
@@ -94,7 +94,7 @@
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
     vector<TunerDemuxInfo> getTRMDemuxInfos();
-    vector<int32_t> getTRMLnbHandles();
+    vector<int64_t> getTRMLnbHandles();
 
     shared_ptr<ITuner> mTuner;
     int mTunerVersion = TUNER_HAL_VERSION_UNKNOWN;
diff --git a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
index 932133e..0d23817 100644
--- a/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
+++ b/services/tuner/aidl/android/media/tv/tuner/ITunerService.aidl
@@ -54,7 +54,7 @@
      * @param frontendHandle the handle of the frontend granted by TRM.
      * @return the aidl interface of the frontend.
      */
-    ITunerFrontend openFrontend(in int frontendHandle);
+    ITunerFrontend openFrontend(in long frontendHandle);
 
     /**
      * Open a new interface of ITunerLnb given a lnbHandle.
@@ -62,7 +62,7 @@
      * @param lnbHandle the handle of the LNB granted by TRM.
      * @return a newly created ITunerLnb interface.
      */
-    ITunerLnb openLnb(in int lnbHandle);
+    ITunerLnb openLnb(in long lnbHandle);
 
     /**
      * Open a new interface of ITunerLnb given a LNB name.
@@ -75,7 +75,7 @@
     /**
      * Create a new instance of Demux.
      */
-    ITunerDemux openDemux(in int demuxHandle);
+    ITunerDemux openDemux(in long demuxHandle);
 
     /**
      * Retrieve the supported filter main types
@@ -83,7 +83,7 @@
      * @param demuxHandle the handle of the demux to query demux info for
      * @return the demux info
      */
-    DemuxInfo getDemuxInfo(in int demuxHandle);
+    DemuxInfo getDemuxInfo(in long demuxHandle);
 
     /**
      * Retrieve the list of demux info for all the demuxes on the system
@@ -104,7 +104,7 @@
      * @param descramblerHandle the handle of the descrambler granted by TRM.
      * @return a newly created ITunerDescrambler interface.
      */
-    ITunerDescrambler openDescrambler(in int descramblerHandle);
+    ITunerDescrambler openDescrambler(in long descramblerHandle);
 
     /**
      * Get an integer that carries the Tuner HIDL version. The high 16 bits are the
diff --git a/services/tuner/hidl/TunerHidlService.cpp b/services/tuner/hidl/TunerHidlService.cpp
index 6bc36be..1b6b032 100644
--- a/services/tuner/hidl/TunerHidlService.cpp
+++ b/services/tuner/hidl/TunerHidlService.cpp
@@ -101,7 +101,7 @@
     return AServiceManager_addService(tunerService->asBinder().get(), getServiceName());
 }
 
-::ndk::ScopedAStatus TunerHidlService::openDemux(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerHidlService::openDemux(int64_t /* in_demuxHandle */,
                                                  shared_ptr<ITunerDemux>* _aidl_return) {
     ALOGV("openDemux");
     HidlResult res;
@@ -123,7 +123,7 @@
     return ::ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(res));
 }
 
-::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int32_t /* in_demuxHandle */,
+::ndk::ScopedAStatus TunerHidlService::getDemuxInfo(int64_t /* in_demuxHandle */,
                                                     DemuxInfo* /* _aidl_return */) {
     ALOGE("getDemuxInfo is not supported");
     return ::ndk::ScopedAStatus::fromServiceSpecificError(
@@ -195,7 +195,7 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-::ndk::ScopedAStatus TunerHidlService::openFrontend(int32_t frontendHandle,
+::ndk::ScopedAStatus TunerHidlService::openFrontend(int64_t frontendHandle,
                                                     shared_ptr<ITunerFrontend>* _aidl_return) {
     HidlResult status;
     sp<HidlIFrontend> frontend;
@@ -221,7 +221,8 @@
     return ::ndk::ScopedAStatus::ok();
 }
 
-::ndk::ScopedAStatus TunerHidlService::openLnb(int lnbHandle, shared_ptr<ITunerLnb>* _aidl_return) {
+::ndk::ScopedAStatus TunerHidlService::openLnb(int64_t lnbHandle,
+                                               shared_ptr<ITunerLnb>* _aidl_return) {
     HidlResult status;
     sp<HidlILnb> lnb;
     int id = TunerHelper::getResourceIdFromHandle(lnbHandle, LNB);
@@ -256,7 +257,7 @@
 }
 
 ::ndk::ScopedAStatus TunerHidlService::openDescrambler(
-        int32_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
+        int64_t /*descramblerHandle*/, shared_ptr<ITunerDescrambler>* _aidl_return) {
     HidlResult status;
     sp<HidlIDescrambler> descrambler;
     //int id = TunerHelper::getResourceIdFromHandle(descramblerHandle, DESCRAMBLER);
@@ -410,8 +411,8 @@
     return infos;
 }
 
-vector<int32_t> TunerHidlService::getTRMLnbHandles() {
-    vector<int32_t> lnbHandles;
+vector<int64_t> TunerHidlService::getTRMLnbHandles() {
+    vector<int64_t> lnbHandles;
     if (mTuner != nullptr) {
         HidlResult res;
         vector<HidlLnbId> lnbIds;
diff --git a/services/tuner/hidl/TunerHidlService.h b/services/tuner/hidl/TunerHidlService.h
index 526c5e6..1973a77 100644
--- a/services/tuner/hidl/TunerHidlService.h
+++ b/services/tuner/hidl/TunerHidlService.h
@@ -73,20 +73,19 @@
     virtual ~TunerHidlService();
 
     ::ndk::ScopedAStatus getFrontendIds(vector<int32_t>* out_ids) override;
-    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_frontendHandle,
-                                         FrontendInfo* _aidl_return) override;
-    ::ndk::ScopedAStatus openFrontend(int32_t in_frontendHandle,
+    ::ndk::ScopedAStatus getFrontendInfo(int32_t in_id, FrontendInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus openFrontend(int64_t in_frontendHandle,
                                       shared_ptr<ITunerFrontend>* _aidl_return) override;
-    ::ndk::ScopedAStatus openLnb(int32_t in_lnbHandle,
+    ::ndk::ScopedAStatus openLnb(int64_t in_lnbHandle,
                                  shared_ptr<ITunerLnb>* _aidl_return) override;
     ::ndk::ScopedAStatus openLnbByName(const std::string& in_lnbName,
                                        shared_ptr<ITunerLnb>* _aidl_return) override;
-    ::ndk::ScopedAStatus openDemux(int32_t in_demuxHandle,
+    ::ndk::ScopedAStatus openDemux(int64_t in_demuxHandle,
                                    shared_ptr<ITunerDemux>* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxCaps(DemuxCapabilities* _aidl_return) override;
-    ::ndk::ScopedAStatus getDemuxInfo(int32_t in_demuxHandle, DemuxInfo* _aidl_return) override;
+    ::ndk::ScopedAStatus getDemuxInfo(int64_t in_demuxHandle, DemuxInfo* _aidl_return) override;
     ::ndk::ScopedAStatus getDemuxInfoList(vector<DemuxInfo>* _aidl_return) override;
-    ::ndk::ScopedAStatus openDescrambler(int32_t in_descramblerHandle,
+    ::ndk::ScopedAStatus openDescrambler(int64_t in_descramblerHandle,
                                          shared_ptr<ITunerDescrambler>* _aidl_return) override;
     ::ndk::ScopedAStatus getTunerHalVersion(int32_t* _aidl_return) override;
     ::ndk::ScopedAStatus openSharedFilter(const string& in_filterToken,
@@ -106,7 +105,7 @@
 private:
     void updateTunerResources();
     vector<TunerFrontendInfo> getTRMFrontendInfos();
-    vector<int32_t> getTRMLnbHandles();
+    vector<int64_t> getTRMLnbHandles();
     HidlResult getHidlFrontendIds(hidl_vec<HidlFrontendId>& ids);
     HidlResult getHidlFrontendInfo(const int id, HidlFrontendInfo& info);
     DemuxCapabilities getAidlDemuxCaps(const HidlDemuxCapabilities& caps);