Merge "av: Fix command/setParameter for inactive subEffects" into udc-qpr-dev
diff --git a/camera/Android.bp b/camera/Android.bp
index b3f70f4..a3fd7f9 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -144,6 +144,7 @@
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
+        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
new file mode 100644
index 0000000..e875c53
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraIdRemapping.aidl
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * Specifies a remapping of Camera Ids.
+ *
+ * Example: For a given package, a remapping of camera id0 to id1 specifies
+ * that any operation intended for id0 should instead be performed on id1.
+ *
+ * @hide
+ */
+parcelable CameraIdRemapping {
+    /**
+     * Specifies remapping of Camera Ids per package.
+     */
+    parcelable PackageIdRemapping {
+        /** Package Name (e.g. com.android.xyz). */
+        String packageName;
+        /**
+         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
+         * affected.
+         */
+        List<String> cameraIdsToReplace;
+        /**
+         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
+         *  the updated camera id for cameraIdsToReplace[i].
+         */
+        List<String> updatedCameraIds;
+    }
+
+    /**
+     * List of Camera Id remappings to perform.
+     */
+    List<PackageIdRemapping> packageIdRemappings;
+}
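
For illustration only, here is a small standalone sketch of the remapping semantics described above, assuming simplified standard-library types rather than the generated AIDL classes: for the named package, cameraIdsToReplace[i] maps to updatedCameraIds[i], and ids not present in the list are left untouched.

    #include <string>
    #include <vector>

    struct PackageIdRemappingSketch {
        std::string packageName;
        std::vector<std::string> cameraIdsToReplace;
        std::vector<std::string> updatedCameraIds;
    };

    // Returns the id to use for (packageName, cameraId); falls back to the
    // original id when no remapping entry applies.
    std::string resolveCameraId(const std::vector<PackageIdRemappingSketch>& remappings,
                                const std::string& packageName,
                                const std::string& cameraId) {
        for (const auto& entry : remappings) {
            if (entry.packageName != packageName) continue;
            for (size_t i = 0; i < entry.cameraIdsToReplace.size() &&
                               i < entry.updatedCameraIds.size(); ++i) {
                if (entry.cameraIdsToReplace[i] == cameraId) {
                    return entry.updatedCameraIds[i];
                }
            }
        }
        return cameraId;
    }
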
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index f8e1631..01b8ff8 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -29,6 +29,7 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
+import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -131,6 +132,22 @@
             int targetSdkVersion);
 
     /**
+     * Remap Camera Ids in the CameraService.
+     *
+     * Once this is in effect, all binder calls in the ICameraService that
+     * use logicalCameraId should consult the remapping state to arrive at the
+     * correct cameraId on which to perform the operation.
+     *
+     * Note: Before the new cameraIdRemapping state is applied, the previous
+     * state is cleared.
+     *
+     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
+     *        cameraIdRemapping object clears any previous cameraIdRemapping
+     *        state in the camera service.
+     */
+    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+
+    /**
      * Remove listener for changes to camera device and flashlight state.
      */
     void removeListener(ICameraServiceListener listener);
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index bd679e5..af00e55 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -5384,7 +5384,7 @@
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
      * They can be queried through
      * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
-     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)</a>.
+     * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION</a>.
      * Unless reported by both
      * <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
      * <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
@@ -5399,13 +5399,12 @@
      * <ul>
      * <li>
      * <p>The mandatory stream combinations listed in
-     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
-     *   would not apply.</p>
+     *   android.scaler.mandatoryMaximumResolutionStreamCombinations would not apply.</p>
      * </li>
      * <li>
      * <p>The bayer pattern of {@code RAW} streams when
      *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
-     *   is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+     *   is selected will be the one listed in ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
      * </li>
      * <li>
      * <p>The following keys will always be present:</p>
@@ -5419,6 +5418,7 @@
      * </ul>
      *
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+     * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
      * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
      */
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index df30dba..0aeed08 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -222,6 +222,21 @@
     return mInit;
 }
 
+void Component::onDeathReceived() {
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mClientDied = true;
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->invalidate();
+            }
+        }
+    }
+    release();
+}
+
 // Methods from ::android::hardware::media::c2::V1_0::IComponent
 Return<Status> Component::queue(const WorkBundle& workBundle) {
     std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
         blockPool = nullptr;
     }
     if (blockPool) {
-        mBlockPoolsMutex.lock();
-        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
-        mBlockPoolsMutex.unlock();
+        bool emplaced = false;
+        {
+            mBlockPoolsMutex.lock();
+            if (!mClientDied) {
+                mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+                emplaced = true;
+            }
+            mBlockPoolsMutex.unlock();
+        }
+        if (!emplaced) {
+            blockPool.reset();
+            status = C2_BAD_STATE;
+        }
     } else if (status == C2_OK) {
         status = C2_CORRUPTED;
     }
@@ -494,8 +519,8 @@
                 ) override {
             auto strongComponent = mComponent.promote();
             if (strongComponent) {
-                LOG(INFO) << "Client died ! release the component !!";
-                strongComponent->release();
+                LOG(INFO) << "Client died ! notify and release the component !!";
+                strongComponent->onDeathReceived();
             } else {
                 LOG(ERROR) << "Client died ! no component to release !!";
             }
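
To make the locking contract of the new onDeathReceived() path explicit (it is repeated verbatim in the 1.0, 1.1, and 1.2 copies of Component.cpp below), here is a simplified sketch using hypothetical names and standard-library types only: the death flag and the pool map are guarded by one mutex, so a pool is either invalidated by the death path or rejected by the creation path.

    #include <map>
    #include <memory>
    #include <mutex>

    struct PoolSketch {
        bool invalidated = false;
        void invalidate() { invalidated = true; }
    };

    struct ComponentSketch {
        std::mutex mMutex;
        bool mClientDied = false;
        std::map<int, std::shared_ptr<PoolSketch>> mPools;

        // Death path: mark the client dead and invalidate existing pools while
        // holding the same lock that guards pool registration.
        void onDeath() {
            std::lock_guard<std::mutex> lock(mMutex);
            mClientDied = true;
            for (auto& entry : mPools) entry.second->invalidate();
        }

        // Creation path: refuse to register new pools once the client has died,
        // so no pool can slip in after the invalidation pass above.
        bool registerPool(int id, std::shared_ptr<PoolSketch> pool) {
            std::lock_guard<std::mutex> lock(mMutex);
            if (mClientDied) return false;  // caller maps this to C2_BAD_STATE
            mPools.emplace(id, std::move(pool));
            return true;
        }
    };
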
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index e343655..3f55618 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -66,6 +66,8 @@
             const sp<::android::hardware::media::bufferpool::V2_0::
                 IClientManager>& clientPoolManager);
     c2_status_t status() const;
+    // Receives a death notification of the client.
+    void onDeathReceived();
 
     typedef ::android::hardware::graphics::bufferqueue::V1_0::
             IGraphicBufferProducer HGraphicBufferProducer1;
@@ -135,6 +137,7 @@
 
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
+    bool mClientDied{false};
 };
 
 }  // namespace utils
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 2dd922f..d0f4f19 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -222,6 +222,21 @@
     return mInit;
 }
 
+void Component::onDeathReceived() {
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mClientDied = true;
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->invalidate();
+            }
+        }
+    }
+    release();
+}
+
 // Methods from ::android::hardware::media::c2::V1_1::IComponent
 Return<Status> Component::queue(const WorkBundle& workBundle) {
     std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
         blockPool = nullptr;
     }
     if (blockPool) {
-        mBlockPoolsMutex.lock();
-        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
-        mBlockPoolsMutex.unlock();
+        bool emplaced = false;
+        {
+            mBlockPoolsMutex.lock();
+            if (!mClientDied) {
+                mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+                emplaced = true;
+            }
+            mBlockPoolsMutex.unlock();
+        }
+        if (!emplaced) {
+            blockPool.reset();
+            status = C2_BAD_STATE;
+        }
     } else if (status == C2_OK) {
         status = C2_CORRUPTED;
     }
@@ -501,8 +526,8 @@
                 ) override {
             auto strongComponent = component.promote();
             if (strongComponent) {
-                LOG(INFO) << "Client died ! release the component !!";
-                strongComponent->release();
+                LOG(INFO) << "Client died ! notify and release the component !!";
+                strongComponent->onDeathReceived();
             } else {
                 LOG(ERROR) << "Client died ! no component to release !!";
             }
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index 1c8c20c..f16de24 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -69,6 +69,8 @@
             const sp<::android::hardware::media::bufferpool::V2_0::
                 IClientManager>& clientPoolManager);
     c2_status_t status() const;
+    // Receives a death notification of the client.
+    void onDeathReceived();
 
     typedef ::android::hardware::graphics::bufferqueue::V1_0::
             IGraphicBufferProducer HGraphicBufferProducer1;
@@ -140,6 +142,7 @@
 
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
+    bool mClientDied{false};
 };
 
 } // namespace utils
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 7994d32..036c900 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -222,6 +222,21 @@
     return mInit;
 }
 
+void Component::onDeathReceived() {
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        mClientDied = true;
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->invalidate();
+            }
+        }
+    }
+    release();
+}
+
 // Methods from ::android::hardware::media::c2::V1_1::IComponent
 Return<Status> Component::queue(const WorkBundle& workBundle) {
     std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
         blockPool = nullptr;
     }
     if (blockPool) {
-        mBlockPoolsMutex.lock();
-        mBlockPools.emplace(blockPool->getLocalId(), blockPool);
-        mBlockPoolsMutex.unlock();
+        bool emplaced = false;
+        {
+            mBlockPoolsMutex.lock();
+            if (!mClientDied) {
+                mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+                emplaced = true;
+            }
+            mBlockPoolsMutex.unlock();
+        }
+        if (!emplaced) {
+            blockPool.reset();
+            status = C2_BAD_STATE;
+        }
     } else if (status == C2_OK) {
         status = C2_CORRUPTED;
     }
@@ -532,8 +557,8 @@
                 ) override {
             auto strongComponent = component.promote();
             if (strongComponent) {
-                LOG(INFO) << "Client died ! release the component !!";
-                strongComponent->release();
+                LOG(INFO) << "Client died ! notify and release the component !!";
+                strongComponent->onDeathReceived();
             } else {
                 LOG(ERROR) << "Client died ! no component to release !!";
             }
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index d0972ee..6a73392 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -69,6 +69,8 @@
             const sp<::android::hardware::media::bufferpool::V2_0::
                 IClientManager>& clientPoolManager);
     c2_status_t status() const;
+    // Receives a death notification of the client.
+    void onDeathReceived();
 
     typedef ::android::hardware::graphics::bufferqueue::V1_0::
             IGraphicBufferProducer HGraphicBufferProducer1;
@@ -145,7 +147,7 @@
 
     using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
     sp<HwDeathRecipient> mDeathRecipient;
-
+    bool mClientDied{false};
 };
 
 } // namespace utils
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 86fd8ab..a75ce70 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -46,6 +46,7 @@
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
@@ -672,8 +673,7 @@
     }
 
     void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) override {
-        mCodec->mCallback->onOutputFramesRendered(
-                {RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
+        mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
     }
 
     void onOutputBuffersChanged() override {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index d72d228..feca03d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -67,6 +67,7 @@
 using hardware::hidl_vec;
 using hardware::fromHeap;
 using hardware::HidlMemory;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 using namespace hardware::cas::V1_0;
 using namespace hardware::cas::native::V1_0;
@@ -82,6 +83,11 @@
 // than making it non-blocking. Do not change this value.
 const static size_t kDequeueTimeoutNs = 0;
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 }  // namespace
 
 CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -148,6 +154,7 @@
       mCCodecCallback(callback),
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
       mIsSurfaceToDisplay(false),
       mHasPresentFenceTimes(false),
       mRenderingDepth(3u),
@@ -174,8 +181,7 @@
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
-    std::string value = server_configurable_flags::GetServerConfigurableFlag(
-            "media_native", "ccodec_rendering_depth", "3");
+    std::string value = GetServerConfigurableFlag("media_native", "ccodec_rendering_depth", "3");
     android::base::ParseInt(value, &mRenderingDepth);
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
@@ -996,7 +1002,7 @@
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-    if (mIsSurfaceToDisplay) {
+    if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
         trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
         processRenderedFrames(qbo.frameTimestamps);
     } else {
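
As a side note, the render-metrics gating added here and in ACodec/MediaCodec follows one pattern: read the server-configurable flag once at construction and combine it with the surface-to-display check before any frame tracking is done. A minimal sketch, with the flag lookup abstracted behind a callable so it stays self-contained (the real code calls server_configurable_flags::GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false") directly):

    #include <functional>
    #include <string>

    using FlagGetter = std::function<std::string(const std::string& ns,
                                                 const std::string& key,
                                                 const std::string& defaultValue)>;

    static bool renderMetricsEnabled(const FlagGetter& getFlag) {
        return getFlag("media_native", "render_metrics_enabled", "false") == "true";
    }

    struct ChannelSketch {
        bool mAreRenderMetricsEnabled;        // evaluated once at construction
        bool mIsSurfaceToDisplay = false;

        explicit ChannelSketch(const FlagGetter& getFlag)
            : mAreRenderMetricsEnabled(renderMetricsEnabled(getFlag)) {}

        // Frames are only tracked when both the server flag is on and the
        // output surface actually goes to the display.
        bool shouldTrackRenderedFrames() const {
            return mAreRenderMetricsEnabled && mIsSurfaceToDisplay;
        }
    };
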
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 2d87aa9..41f5ae2 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -334,6 +334,7 @@
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
     std::deque<TrackedFrame> mTrackedFrames;
+    bool mAreRenderMetricsEnabled;
     bool mIsSurfaceToDisplay;
     bool mHasPresentFenceTimes;
 
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 29aad5e..320b192 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -103,6 +103,13 @@
 
     virtual void getConsumerUsage(uint64_t *consumerUsage);
 
+    /**
+     * Invalidate the class.
+     *
+     * After the call, fetchGraphicBlock() will return C2_BAD_STATE.
+     */
+    virtual void invalidate();
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index f2cd585..5fb0c8f 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -601,6 +601,9 @@
         static int kMaxIgbpRetryDelayUs = 10000;
 
         std::unique_lock<std::mutex> lock(mMutex);
+        if (mInvalidated) {
+            return C2_BAD_STATE;
+        }
         if (mLastDqLogTs == 0) {
             mLastDqLogTs = getTimestampNow();
         } else {
@@ -746,6 +749,11 @@
         *consumeUsage = mConsumerUsage;
     }
 
+    void invalidate() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        mInvalidated = true;
+    }
+
 private:
     friend struct C2BufferQueueBlockPoolData;
 
@@ -783,6 +791,7 @@
     // if the token has been expired, the buffers will not call IGBP::cancelBuffer()
     // when they are no longer used.
     std::shared_ptr<int> mIgbpValidityToken;
+    bool mInvalidated{false};
 };
 
 C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -1103,3 +1112,9 @@
     }
 }
 
+void C2BufferQueueBlockPool::invalidate() {
+    if (mImpl) {
+        mImpl->invalidate();
+    }
+}
+
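
A minimal sketch of the fast-fail pattern introduced above, assuming simplified types: invalidate() flips a flag under the same mutex that the fetch path takes, so any fetch issued after the client died returns C2_BAD_STATE instead of touching the dead client's buffer queue.

    #include <mutex>

    enum StatusSketch { STATUS_OK = 0, STATUS_BAD_STATE = 1 };  // stand-ins for c2_status_t

    class PoolImplSketch {
    public:
        // Called from the death path: after this, every fetch fails fast.
        void invalidate() {
            std::scoped_lock lock(mMutex);
            mInvalidated = true;
        }

        // Called on the allocation path; checks the flag before doing any work.
        StatusSketch fetchBlock() {
            std::unique_lock<std::mutex> lock(mMutex);
            if (mInvalidated) return STATUS_BAD_STATE;
            // ... normal dequeue/allocation path elided ...
            return STATUS_OK;
        }

    private:
        std::mutex mMutex;
        bool mInvalidated = false;
    };
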
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index d55a3d8..d8c2292 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -114,8 +114,8 @@
 }
 
 namespace {
-    constexpr int kSpinNumForLock = 100;
-    constexpr int kSpinNumForUnlock = 200;
+    constexpr int kSpinNumForLock = 0;
+    constexpr int kSpinNumForUnlock = 0;
 
     enum : uint32_t {
         FUTEX_UNLOCKED = 0,
@@ -125,32 +125,65 @@
 }
 
 int C2SyncVariables::lock() {
-    uint32_t old;
+    uint32_t old = FUTEX_UNLOCKED;
+
+    // see if we can lock uncontended immediately (if previously unlocked)
+    if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
+        return 0;
+    }
+
+    // spin to see if we can get it with a short wait without involving kernel
     for (int i = 0; i < kSpinNumForLock; i++) {
-        old = 0;
+        sched_yield();
+
+        old = FUTEX_UNLOCKED;
         if (mLock.compare_exchange_strong(old, FUTEX_LOCKED_UNCONTENDED)) {
             return 0;
         }
-        sched_yield();
     }
 
-    if (old == FUTEX_LOCKED_UNCONTENDED)
+    // Still locked. If the holder thinks the lock is uncontended, mark it contended so that
+    // they know they need to wake us up on unlock.
+    if (old == FUTEX_LOCKED_UNCONTENDED) {
+        old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
+        // It is possible that the holder released the lock at this very moment (and old
+        // becomes UNLOCKED). If so, we do not involve the kernel to wait for the lock to be
+        // released, but we still mark our lock as contended (even though we are the only
+        // holder).
+    }
 
-    while (old) {
-        (void) syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+    // while the futex is still locked by someone else
+    while (old != FUTEX_UNLOCKED) {
+        // wait until the other side releases the lock (while it is still marked contended)
+        (void)syscall(__NR_futex, &mLock, FUTEX_WAIT, FUTEX_LOCKED_CONTENDED, NULL, NULL, 0);
+        // try to relock
         old = mLock.exchange(FUTEX_LOCKED_CONTENDED);
     }
     return 0;
 }
 
 int C2SyncVariables::unlock() {
-    if (mLock.exchange(FUTEX_UNLOCKED) == FUTEX_LOCKED_UNCONTENDED) return 0;
+    // TRICKY: here we assume that we are holding this lock
 
+    // unlock the lock immediately (since we were holding it)
+    // If it was (still) locked uncontended, we are done (no need to involve the kernel)
+    if (mLock.exchange(FUTEX_UNLOCKED) == FUTEX_LOCKED_UNCONTENDED) {
+        return 0;
+    }
+
+    // We don't need to spin before waking: at this point we already know there is a waiter
+    // that we need to wake up. This spin loop existed to avoid the wake syscall in case someone
+    // happened to re-lock the lock (uncontended) before we woke the other waiters. It is
+    // unclear whether this path is ever exercised or whether it is the desired behavior. (Note
+    // that if this code were removed, the same situation would still be handled in lock() by
+    // the woken-up waiter, which realizes that the lock is now taken.)
     for (int i = 0; i < kSpinNumForUnlock; i++) {
-        if (mLock.load()) {
+        // Here we check whether someone has re-locked this lock; if they re-locked it
+        // uncontended, we upgrade it to contended (since there are other waiters).
+        if (mLock.load() != FUTEX_UNLOCKED) {
             uint32_t old = FUTEX_LOCKED_UNCONTENDED;
             mLock.compare_exchange_strong(old, FUTEX_LOCKED_CONTENDED);
+            // this is practically always true here, so we return immediately
             if (old) {
                 return 0;
             }
@@ -158,7 +191,8 @@
         sched_yield();
     }
 
-    (void) syscall(__NR_futex, &mLock, FUTEX_WAKE, 1, NULL, NULL, 0);
+    // wake up one waiter
+    (void)syscall(__NR_futex, &mLock, FUTEX_WAKE, 1, NULL, NULL, 0);
     return 0;
 }
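
For reference, a condensed standalone restatement of the three-state futex mutex that the rewritten lock()/unlock() implement (0 = unlocked, 1 = locked uncontended, 2 = locked contended), with the spin loops omitted since kSpinNumForLock/kSpinNumForUnlock are now 0. This is a sketch with placeholder names, not the production class:

    #include <atomic>
    #include <cstdint>
    #include <linux/futex.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    class FutexLockSketch {
    public:
        void lock() {
            uint32_t old = 0;
            // Fast path: grab the lock while it is unlocked and uncontended.
            if (mState.compare_exchange_strong(old, 1)) return;
            // Slow path: mark the lock contended so the holder knows to wake us,
            // then sleep in the kernel until it is released.
            if (old == 1) old = mState.exchange(2);
            while (old != 0) {
                syscall(__NR_futex, &mState, FUTEX_WAIT, 2, nullptr, nullptr, 0);
                old = mState.exchange(2);
            }
        }

        void unlock() {
            // If the lock was uncontended, nobody is waiting: no syscall needed.
            if (mState.exchange(0) == 1) return;
            // Otherwise wake exactly one waiter.
            syscall(__NR_futex, &mState, FUTEX_WAKE, 1, nullptr, nullptr, 0);
        }

    private:
        std::atomic<uint32_t> mState{0};
    };
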
 
diff --git a/media/libaudioclient/aidl/android/media/ISoundDose.aidl b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
index 6cb22ef..d80b6bf 100644
--- a/media/libaudioclient/aidl/android/media/ISoundDose.aidl
+++ b/media/libaudioclient/aidl/android/media/ISoundDose.aidl
@@ -55,6 +55,30 @@
      */
     oneway void setCsdEnabled(boolean enabled);
 
+    /**
+     * Structure containing a device identifier (address and type) together with
+     * its categorization as a headphone or not.
+     */
+    @JavaDerive(toString = true)
+    parcelable AudioDeviceCategory {
+        @utf8InCpp String address;
+        int internalAudioType;
+        boolean csdCompatible;
+    }
+
+    /**
+     * Resets the list of stored device categories for the native layer. Should
+     * only be called once at boot time after parsing the existing AudioDeviceCategories.
+     */
+    oneway void initCachedAudioDeviceCategories(in AudioDeviceCategory[] audioDevices);
+
+    /**
+     * Sets whether a device with the given address and type is a headphone or not.
+     * This is used to determine whether we compute CSD for the given device,
+     * since we cannot rely completely on the device annotations.
+     */
+    oneway void setAudioDeviceCategory(in AudioDeviceCategory audioDevice);
+
     /* -------------------------- Test API methods --------------------------
     /** Get the currently used RS2 upper bound. */
     float getOutputRs2UpperBound();
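
A hypothetical sketch of how the native layer could cache these categories, assuming a simple map keyed by (address, internalAudioType); the names and types are illustrative, not the actual sound-dose implementation:

    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    struct AudioDeviceCategorySketch {
        std::string address;
        int internalAudioType;
        bool csdCompatible;
    };

    class DeviceCategoryCacheSketch {
    public:
        // Replaces the whole cache; per the comment above, called once at boot.
        void init(const std::vector<AudioDeviceCategorySketch>& devices) {
            mCache.clear();
            for (const auto& d : devices) set(d);
        }

        // Updates (or inserts) the category of a single device.
        void set(const AudioDeviceCategorySketch& d) {
            mCache[{d.address, d.internalAudioType}] = d.csdCompatible;
        }

        // True when CSD should be computed for this device; defaults to false
        // for devices that were never categorized.
        bool isCsdCompatible(const std::string& address, int type) const {
            auto it = mCache.find({address, type});
            return it != mCache.end() && it->second;
        }

    private:
        std::map<std::pair<std::string, int>, bool> mCache;
    };
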
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index c7d7f69..99e3565 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -293,6 +293,9 @@
     if (status_t status = filterAndUpdateScreenParameters(parameters); status != OK) {
         ALOGW("%s: filtering or updating screen parameters failed: %d", __func__, status);
     }
+    if (status_t status = filterAndUpdateTelephonyParameters(parameters); status != OK) {
+        ALOGW("%s: filtering or updating telephony parameters failed: %d", __func__, status);
+    }
     return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
 }
 
@@ -1370,6 +1373,52 @@
     return OK;
 }
 
+status_t DeviceHalAidl::filterAndUpdateTelephonyParameters(AudioParameter &parameters) {
+    TIME_CHECK();
+    using TtyMode = ITelephony::TelecomConfig::TtyMode;
+    ITelephony::TelecomConfig telConfig;
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyTtyMode),
+                    [&telConfig](const String8& mode) {
+                        if (mode == AudioParameter::valueTtyModeOff) {
+                            telConfig.ttyMode = TtyMode::OFF;
+                            return OK;
+                        } else if (mode == AudioParameter::valueTtyModeFull) {
+                            telConfig.ttyMode = TtyMode::FULL;
+                            return OK;
+                        } else if (mode == AudioParameter::valueTtyModeHco) {
+                            telConfig.ttyMode = TtyMode::HCO;
+                            return OK;
+                        } else if (mode == AudioParameter::valueTtyModeVco) {
+                            telConfig.ttyMode = TtyMode::VCO;
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyTtyMode, mode.c_str());
+                        return BAD_VALUE;
+                    }));
+    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
+                    parameters, String8(AudioParameter::keyHacSetting),
+                    [&telConfig](const String8& onOrOff) {
+                        if (onOrOff == AudioParameter::valueHacOn) {
+                            telConfig.isHacEnabled = Boolean{ .value = true };
+                            return OK;
+                        } else if (onOrOff == AudioParameter::valueHacOff) {
+                            telConfig.isHacEnabled = Boolean{ .value = false };
+                            return OK;
+                        }
+                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyHacSetting, onOrOff.c_str());
+                        return BAD_VALUE;
+                    }));
+    if (mTelephony != nullptr && telConfig != ITelephony::TelecomConfig{}) {
+        ITelephony::TelecomConfig newTelConfig;
+        return statusTFromBinderStatus(
+                mTelephony->setTelecomConfig(telConfig, &newTelConfig));
+    }
+    return OK;
+}
+
 status_t DeviceHalAidl::findOrCreatePatch(
         const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
     std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
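
A simplified sketch of the key/value translation performed by the new filterAndUpdateTelephonyParameters(): legacy setParameters() strings are mapped onto a typed config, and a single setTelecomConfig() call is issued only when at least one field was actually set. The enum, struct, and string literals below are stand-ins, not the real AudioParameter/ITelephony definitions:

    #include <optional>
    #include <string>
    #include <unordered_map>

    enum class TtyModeSketch { OFF, FULL, HCO, VCO };

    struct TelecomConfigSketch {
        std::optional<TtyModeSketch> ttyMode;
        std::optional<bool> isHacEnabled;
        bool empty() const { return !ttyMode && !isHacEnabled; }
    };

    // Invalid values map to std::nullopt, which the HAL reports as BAD_VALUE.
    std::optional<TtyModeSketch> parseTtyMode(const std::string& value) {
        static const std::unordered_map<std::string, TtyModeSketch> kModes = {
            {"tty_off", TtyModeSketch::OFF}, {"tty_full", TtyModeSketch::FULL},
            {"tty_hco", TtyModeSketch::HCO}, {"tty_vco", TtyModeSketch::VCO},
        };
        auto it = kModes.find(value);
        return it == kModes.end() ? std::nullopt
                                  : std::optional<TtyModeSketch>(it->second);
    }

    // Mirrors the `telConfig != ITelephony::TelecomConfig{}` check above: only
    // issue the binder call when something was filtered out of the parameters.
    bool shouldSendConfig(const TelecomConfigSketch& c) { return !c.empty(); }
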
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 6566de1..6b34bf4 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -224,6 +224,7 @@
     status_t filterAndUpdateBtLeParameters(AudioParameter &parameters);
     status_t filterAndUpdateBtScoParameters(AudioParameter &parameters);
     status_t filterAndUpdateScreenParameters(AudioParameter &parameters);
+    status_t filterAndUpdateTelephonyParameters(AudioParameter &parameters);
     status_t findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds,
         const std::set<int32_t>& sinkPortConfigIds,
diff --git a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
index f00b1a0..a8f9f7e 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalAidl.cpp
@@ -14,7 +14,10 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <map>
 #include <memory>
+#include <string>
 
 #define LOG_TAG "DevicesFactoryHalAidl"
 //#define LOG_NDEBUG 0
@@ -75,6 +78,21 @@
                 if (strcmp(instance, "default") == 0) instance = "primary";
                 static_cast<decltype(names)>(context)->push_back(instance);
             });
+    std::sort(names->begin(), names->end(), [](const std::string& lhs,
+                    const std::string& rhs) {
+        // This order corresponds to the canonical order of modules as specified in
+        // the reference 'audio_policy_configuration_7_0.xml' file.
+        static const std::map<std::string, int> kPriorities{
+            { "primary", 0 }, { "a2dp", 1 }, { "usb", 2 }, { "r_submix", 3 },
+            { "bluetooth", 4 }, { "hearing_aid", 5 }, { "msd", 6 }, { "stub", 7 }
+        };
+        auto lhsIt = kPriorities.find(lhs);
+        auto rhsIt = kPriorities.find(rhs);
+        if (lhsIt != kPriorities.end() && rhsIt != kPriorities.end()) {
+            return lhsIt->second < rhsIt->second;
+        }
+        return lhsIt != kPriorities.end();
+    });
     return OK;
 }
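
A standalone illustration of the comparator added above: names with a known canonical priority are ordered by that priority, and unknown (for example vendor-specific) module names sink to the end. The sample input is made up:

    #include <algorithm>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main() {
        static const std::map<std::string, int> kPriorities{
            {"primary", 0}, {"a2dp", 1}, {"usb", 2}, {"r_submix", 3},
            {"bluetooth", 4}, {"hearing_aid", 5}, {"msd", 6}, {"stub", 7}};
        std::vector<std::string> names{"usb", "vendor_custom", "primary", "r_submix"};
        std::sort(names.begin(), names.end(),
                  [](const std::string& lhs, const std::string& rhs) {
                      auto l = kPriorities.find(lhs);
                      auto r = kPriorities.find(rhs);
                      if (l != kPriorities.end() && r != kPriorities.end()) {
                          return l->second < r->second;
                      }
                      return l != kPriorities.end();  // known names sort first
                  });
        // Prints: primary, usb, r_submix, vendor_custom (one per line).
        for (const auto& n : names) std::cout << n << '\n';
        return 0;
    }
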
 
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index e4c3309..2d9e155 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -56,6 +56,7 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::State;
 
 namespace android {
 namespace effect {
@@ -165,13 +166,20 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
+    State state = State::INIT;
+    if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
+        state != State::PROCESSING) {
+        ALOGI("%s skipping %s process because it's %s", __func__, mDesc.common.name.c_str(),
+              mConversion->isBypassing()
+                      ? "bypassing"
+                      : aidl::android::hardware::audio::effect::toString(state).c_str());
+        return OK;
+    }
+
     auto statusQ = mConversion->getStatusMQ();
     auto inputQ = mConversion->getInputMQ();
     auto outputQ = mConversion->getOutputMQ();
     auto efGroup = mConversion->getEventFlagGroup();
-    if (mConversion->isBypassing()) {
-        return OK;
-    }
     if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
         !outputQ->isValid() || !efGroup) {
         ALOGE("%s invalid FMQ [Status %d I %d O %d] efGroup %p", __func__,
diff --git a/media/libmediahelper/AudioParameter.cpp b/media/libmediahelper/AudioParameter.cpp
index 3832e90..816396e 100644
--- a/media/libmediahelper/AudioParameter.cpp
+++ b/media/libmediahelper/AudioParameter.cpp
@@ -43,6 +43,16 @@
 const char * const AudioParameter::keyBtHfpEnable = AUDIO_PARAMETER_KEY_HFP_ENABLE;
 const char * const AudioParameter::keyBtHfpSamplingRate = AUDIO_PARAMETER_KEY_HFP_SET_SAMPLING_RATE;
 const char * const AudioParameter::keyBtHfpVolume = AUDIO_PARAMETER_KEY_HFP_VOLUME;
+#ifndef __ANDROID_VNDK__
+const char * const AudioParameter::keyTtyMode = AUDIO_PARAMETER_KEY_TTY_MODE;
+const char * const AudioParameter::valueTtyModeOff = AUDIO_PARAMETER_VALUE_TTY_OFF;
+const char * const AudioParameter::valueTtyModeFull = AUDIO_PARAMETER_VALUE_TTY_FULL;
+const char * const AudioParameter::valueTtyModeHco = AUDIO_PARAMETER_VALUE_TTY_HCO;
+const char * const AudioParameter::valueTtyModeVco = AUDIO_PARAMETER_VALUE_TTY_VCO;
+const char * const AudioParameter::keyHacSetting = AUDIO_PARAMETER_KEY_HAC;
+const char * const AudioParameter::valueHacOff = AUDIO_PARAMETER_VALUE_HAC_OFF;
+const char * const AudioParameter::valueHacOn = AUDIO_PARAMETER_VALUE_HAC_ON;
+#endif  // __ANDROID_VNDK__
 const char * const AudioParameter::keyHwAvSync = AUDIO_PARAMETER_HW_AV_SYNC;
 const char * const AudioParameter::keyPresentationId = AUDIO_PARAMETER_STREAM_PRESENTATION_ID;
 const char * const AudioParameter::keyProgramId = AUDIO_PARAMETER_STREAM_PROGRAM_ID;
diff --git a/media/libmediahelper/include/media/AudioParameter.h b/media/libmediahelper/include/media/AudioParameter.h
index 3eee854..a4abd06 100644
--- a/media/libmediahelper/include/media/AudioParameter.h
+++ b/media/libmediahelper/include/media/AudioParameter.h
@@ -69,6 +69,22 @@
     static const char * const keyBtHfpSamplingRate;
     static const char * const keyBtHfpVolume;
 
+#ifndef __ANDROID_VNDK__
+    // These static fields are not used by vendor code; they were added to keep
+    // the framework code consistent. There is no plan to expose them to vendors,
+    // because they are used by the HIDL get/setParameters interface, which does
+    // not exist in the AIDL HAL interface.
+    static const char * const keyTtyMode;
+    static const char * const valueTtyModeOff;
+    static const char * const valueTtyModeFull;
+    static const char * const valueTtyModeHco;
+    static const char * const valueTtyModeVco;
+
+    static const char * const keyHacSetting;
+    static const char * const valueHacOff;
+    static const char * const valueHacOn;
+#endif  // __ANDROID_VNDK__
+
     //  keyHwAvSync: get HW synchronization source identifier from a device
     //  keyMonoOutput: Enable mono audio playback
     //  keyStreamHwAvSync: set HW synchronization source identifier on a stream
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 505775b..d506ee5 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -43,6 +43,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <media/hardware/HardwareAPI.h>
 #include <media/MediaBufferHolder.h>
@@ -64,11 +65,14 @@
 #include "include/SharedMemoryBuffer.h"
 #include <media/stagefright/omx/OMXUtils.h>
 
+#include <server_configurable_flags/get_flags.h>
+
 namespace android {
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
 
 using hardware::media::omx::V1_0::Status;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 enum {
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
@@ -81,6 +85,11 @@
 
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -561,6 +570,9 @@
 ACodec::ACodec()
     : mSampleRate(0),
       mNodeGeneration(0),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
+      mIsWindowToDisplay(false),
+      mHasPresentFenceTimes(false),
       mUsingNativeWindow(false),
       mNativeWindowUsageBits(0),
       mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
@@ -632,7 +644,8 @@
     if (!mBufferChannel) {
         mBufferChannel = std::make_shared<ACodecBufferChannel>(
                 new AMessage(kWhatInputBufferFilled, this),
-                new AMessage(kWhatOutputBufferDrained, this));
+                new AMessage(kWhatOutputBufferDrained, this),
+                new AMessage(kWhatPollForRenderedBuffers, this));
     }
     return mBufferChannel;
 }
@@ -742,6 +755,7 @@
     // if we have not yet started the codec, we can simply set the native window
     if (mBuffers[kPortIndexInput].size() == 0) {
         mNativeWindow = surface;
+        initializeFrameTracking();
         return OK;
     }
 
@@ -850,6 +864,7 @@
 
     mNativeWindow = nativeWindow;
     mNativeWindowUsageBits = usageBits;
+    initializeFrameTracking();
     return OK;
 }
 
@@ -960,7 +975,6 @@
                 BufferInfo info;
                 info.mStatus = BufferInfo::OWNED_BY_US;
                 info.mFenceFd = -1;
-                info.mRenderInfo = NULL;
                 info.mGraphicBuffer = NULL;
                 info.mNewGraphicBuffer = false;
 
@@ -1228,6 +1242,7 @@
 
     *bufferCount = def.nBufferCountActual;
     *bufferSize =  def.nBufferSize;
+    initializeFrameTracking();
     return err;
 }
 
@@ -1266,7 +1281,6 @@
         info.mStatus = BufferInfo::OWNED_BY_US;
         info.mFenceFd = fenceFd;
         info.mIsReadFence = false;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = graphicBuffer;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1343,7 +1357,6 @@
         BufferInfo info;
         info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
         info.mFenceFd = -1;
-        info.mRenderInfo = NULL;
         info.mGraphicBuffer = NULL;
         info.mNewGraphicBuffer = false;
         info.mDequeuedAt = mDequeueCounter;
@@ -1439,42 +1452,6 @@
     return err;
 }
 
-void ACodec::updateRenderInfoForDequeuedBuffer(
-        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
-
-    info->mRenderInfo =
-        mRenderTracker.updateInfoForDequeuedBuffer(
-                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
-
-    // check for any fences already signaled
-    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
-}
-
-void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
-        mRenderTracker.dumpRenderQueue();
-    }
-}
-
-void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
-    std::list<FrameRenderTracker::Info> done =
-        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
-
-    // unlink untracked frames
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
-        ssize_t index = it->getIndex();
-        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
-            mBuffers[kPortIndexOutput][index].mRenderInfo = NULL;
-        } else if (index >= 0) {
-            // THIS SHOULD NEVER HAPPEN
-            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
-        }
-    }
-
-    mCallback->onOutputFramesRendered(done);
-}
-
 void ACodec::onFirstTunnelFrameReady() {
     mCallback->onFirstTunnelFrameReady();
 }
@@ -1529,7 +1506,6 @@
 
                 info->mStatus = BufferInfo::OWNED_BY_US;
                 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
-                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                 return info;
             }
         }
@@ -1574,18 +1550,96 @@
     oldest->mNewGraphicBuffer = true;
     oldest->mStatus = BufferInfo::OWNED_BY_US;
     oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
-    mRenderTracker.untrackFrame(oldest->mRenderInfo);
-    oldest->mRenderInfo = NULL;
 
     ALOGV("replaced oldest buffer #%u with age %u, graphicBuffer %p",
             (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
             mDequeueCounter - oldest->mDequeuedAt,
             oldest->mGraphicBuffer->handle);
-
-    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
     return oldest;
 }
 
+void ACodec::initializeFrameTracking() {
+    mTrackedFrames.clear();
+
+    int isWindowToDisplay = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
+            &isWindowToDisplay);
+    mIsWindowToDisplay = isWindowToDisplay == 1;
+    // No frame tracking is needed if we're not sending frames to the display
+    if (!mIsWindowToDisplay) {
+        // Return early so we don't call into SurfaceFlinger (which requires permissions)
+        return;
+    }
+
+    int hasPresentFenceTimes = 0;
+    mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT,
+            &hasPresentFenceTimes);
+    mHasPresentFenceTimes = hasPresentFenceTimes == 1;
+    if (!mHasPresentFenceTimes) {
+        ALOGI("Using latch times for frame rendered signals - present fences not supported");
+    }
+
+    status_t err = native_window_enable_frame_timestamps(mNativeWindow.get(), true);
+    if (err) {
+        ALOGE("Failed to enable frame timestamps (%d)", err);
+    }
+}
+
+void ACodec::trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
+    // If the render time is earlier than now, then we're suggesting it should be rendered ASAP,
+    // so track the frame as if the desired render time is now.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    if (desiredRenderTimeNs < nowNs) {
+        desiredRenderTimeNs = nowNs;
+    }
+    // We've just queued a frame to the surface, so keep track of it and later check to see if it is
+    // actually rendered.
+    TrackedFrame frame;
+    frame.id = frameId;
+    frame.mediaTimeUs = mediaTimeUs;
+    frame.desiredRenderTimeNs = desiredRenderTimeNs;
+    mTrackedFrames.push_back(frame);
+}
+
+void ACodec::pollForRenderedFrames() {
+    std::list<RenderedFrameInfo> renderedFrameInfos;
+    // Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
+    // in fact, been rendered.
+    int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+    while (!mTrackedFrames.empty()) {
+        TrackedFrame & frame = mTrackedFrames.front();
+        // Frames that should have been rendered at least 100ms in the past are checked
+        if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
+            break;
+        }
+
+        status_t err;
+        nsecs_t latchOrPresentTimeNs = NATIVE_WINDOW_TIMESTAMP_INVALID;
+        err = native_window_get_frame_timestamps(mNativeWindow.get(), frame.id,
+                /* outRequestedPresentTime */ nullptr, /* outAcquireTime */ nullptr,
+                mHasPresentFenceTimes ? nullptr : &latchOrPresentTimeNs, // latch time
+                /* outFirstRefreshStartTime */ nullptr, /* outLastRefreshStartTime */ nullptr,
+                /* outGpuCompositionDoneTime */ nullptr,
+                mHasPresentFenceTimes ? &latchOrPresentTimeNs : nullptr, // display present time,
+                /* outDequeueReadyTime */ nullptr, /* outReleaseTime */ nullptr);
+        if (err) {
+            ALOGE("Failed to get frame timestamps for %lld: %d", (long long) frame.id, err);
+        }
+        // If we still don't have a render time by now, the frame is considered dropped
+        if (latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_PENDING &&
+            latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_INVALID) {
+            renderedFrameInfos.push_back(RenderedFrameInfo(frame.mediaTimeUs,
+                                                           latchOrPresentTimeNs));
+        }
+
+        mTrackedFrames.pop_front();
+    }
+
+    if (!renderedFrameInfos.empty()) {
+        mCallback->onOutputFramesRendered(renderedFrameInfos);
+    }
+}
+
 status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
     if (portIndex == kPortIndexInput) {
         mBufferChannel->setInputBufferArray({});
@@ -1661,11 +1715,6 @@
         ::close(info->mFenceFd);
     }
 
-    if (portIndex == kPortIndexOutput) {
-        mRenderTracker.untrackFrame(info->mRenderInfo, i);
-        info->mRenderInfo = NULL;
-    }
-
     // remove buffer even if mOMXNode->freeBuffer fails
     mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
     return err;
@@ -6030,22 +6079,10 @@
     sp<RefBase> obj;
     CHECK(msg->findObject("messages", &obj));
     sp<MessageList> msgList = static_cast<MessageList *>(obj.get());
-
-    bool receivedRenderedEvents = false;
     for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin();
           it != msgList->getList().cend(); ++it) {
         (*it)->setWhat(ACodec::kWhatOMXMessageItem);
         mCodec->handleMessage(*it);
-        int32_t type;
-        CHECK((*it)->findInt32("type", &type));
-        if (type == omx_message::FRAME_RENDERED) {
-            receivedRenderedEvents = true;
-        }
-    }
-
-    if (receivedRenderedEvents) {
-        // NOTE: all buffers are rendered in this case
-        mCodec->notifyOfRenderedFrames();
     }
     return true;
 }
@@ -6554,15 +6591,6 @@
     info->mDequeuedAt = ++mCodec->mDequeueCounter;
     info->mStatus = BufferInfo::OWNED_BY_US;
 
-    if (info->mRenderInfo != NULL) {
-        // The fence for an emptied buffer must have signaled, but there still could be queued
-        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
-        // as we will soon requeue this buffer to the surface. While in theory we could still keep
-        // track of buffers that are requeued to the surface, it is better to add support to the
-        // buffer-queue to notify us of released buffers and their fences (in the future).
-        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
-    }
-
     // byte buffers cannot take fences, so wait for any fence now
     if (mCodec->mNativeWindow == NULL) {
         (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
@@ -6769,14 +6797,6 @@
             mCodec->mLastHdr10PlusBuffer = hdr10PlusInfo;
         }
 
-        // save buffers sent to the surface so we can get render time when they return
-        int64_t mediaTimeUs = -1;
-        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-        if (mediaTimeUs >= 0) {
-            mCodec->mRenderTracker.onFrameQueued(
-                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
-        }
-
         int64_t timestampNs = 0;
         if (!msg->findInt64("timestampNs", &timestampNs)) {
             // use media timestamp if client did not request a specific render timestamp
@@ -6790,11 +6810,25 @@
         err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
         ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
 
+        uint64_t frameId;
+        err = native_window_get_next_frame_id(mCodec->mNativeWindow.get(), &frameId);
+
         info->checkReadFence("onOutputBufferDrained before queueBuffer");
         err = mCodec->mNativeWindow->queueBuffer(
                     mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
-        // TODO(b/266211548): Poll the native window for rendered buffers, since when queueing
-        // buffers, the frame event history delta is retrieved.
+
+        int64_t mediaTimeUs = -1;
+        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
+        if (mCodec->mAreRenderMetricsEnabled && mCodec->mIsWindowToDisplay) {
+            mCodec->trackReleasedFrame(frameId, mediaTimeUs, timestampNs);
+            mCodec->pollForRenderedFrames();
+        } else {
+            // When the surface is an intermediate surface, onFrameRendered is triggered immediately
+            // when the frame is queued to the non-display surface
+            mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs,
+                                                                         timestampNs)});
+        }
+
         info->mFenceFd = -1;
         if (err == OK) {
             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
@@ -7021,7 +7055,6 @@
     ++mCodec->mNodeGeneration;
 
     mCodec->mComponentName = componentName;
-    mCodec->mRenderTracker.setComponentName(componentName);
     mCodec->mFlags = 0;
 
     if (componentName.endsWith(".secure")) {
@@ -7644,7 +7677,6 @@
 
 void ACodec::ExecutingState::stateEntered() {
     ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());
-    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
     mCodec->processDeferredMessages();
 }
 
@@ -7755,7 +7787,15 @@
                     mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                 }
             }
-            return true;
+            handled = true;
+            break;
+        }
+
+        case kWhatPollForRenderedBuffers:
+        {
+            mCodec->pollForRenderedFrames();
+            handled = true;
+            break;
         }
 
         default:
@@ -8439,7 +8479,7 @@
 }
 
 bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8613,7 +8653,7 @@
 
 bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
         int64_t mediaTimeUs, nsecs_t systemNano) {
-    mCodec->onFrameRendered(mediaTimeUs, systemNano);
+    mCodec->mCallback->onOutputFramesRendered({RenderedFrameInfo(mediaTimeUs, systemNano)});
     return true;
 }
 
@@ -8644,10 +8684,6 @@
                             OMX_CommandPortEnable, kPortIndexOutput);
                 }
 
-                // Clear the RenderQueue in which queued GraphicBuffers hold the
-                // actual buffer references in order to free them early.
-                mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
                 if (err == OK) {
                     err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                     ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
@@ -9031,8 +9067,6 @@
         // the native window for rendering. Let's get those back as well.
         mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
 
-        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
-
         mCodec->mCallback->onFlushCompleted();
 
         mCodec->mPortEOS[kPortIndexInput] =
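
A simplified model of the frame tracking that replaces FrameRenderTracker in ACodec above: frames queued to a display surface are remembered, and a later poll reports the ones whose desired render time is at least 100 ms in the past and for which the window produced a latch/present timestamp. The timestamp source is abstracted behind a callable here; the real code uses native_window_get_frame_timestamps(), and all names are placeholders:

    #include <cstdint>
    #include <deque>
    #include <vector>

    struct TrackedFrameSketch {
        int64_t id;
        int64_t mediaTimeUs;
        int64_t desiredRenderTimeNs;
    };

    struct RenderedFrameSketch {
        int64_t mediaTimeUs;
        int64_t renderTimeNs;
    };

    class FrameTrackerSketch {
    public:
        void onFrameQueued(int64_t id, int64_t mediaTimeUs, int64_t renderTimeNs,
                           int64_t nowNs) {
            // A render time in the past means "render ASAP": track it as now.
            if (renderTimeNs < nowNs) renderTimeNs = nowNs;
            mFrames.push_back({id, mediaTimeUs, renderTimeNs});
        }

        // getTimestamp(frameId) returns the latch/present time, or a negative
        // value when the frame is still pending or was dropped.
        template <typename GetTimestamp>
        std::vector<RenderedFrameSketch> poll(int64_t nowNs, GetTimestamp getTimestamp) {
            std::vector<RenderedFrameSketch> rendered;
            while (!mFrames.empty()) {
                const TrackedFrameSketch& f = mFrames.front();
                // Only inspect frames that should have rendered >= 100 ms ago.
                if (f.desiredRenderTimeNs > nowNs - 100'000'000LL) break;
                int64_t t = getTimestamp(f.id);
                if (t >= 0) rendered.push_back({f.mediaTimeUs, t});  // else: dropped
                mFrames.pop_front();
            }
            return rendered;
        }

    private:
        std::deque<TrackedFrameSketch> mFrames;
    };
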
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index 8f2bed2..ad42813 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/ACodec.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/MediaCodecBuffer.h>
 #include <system/window.h>
@@ -87,9 +88,11 @@
 }
 
 ACodecBufferChannel::ACodecBufferChannel(
-        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained)
+        const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+        const sp<AMessage> &pollForRenderedBuffers)
     : mInputBufferFilled(inputBufferFilled),
       mOutputBufferDrained(outputBufferDrained),
+      mPollForRenderedBuffers(pollForRenderedBuffers),
       mHeapSeqNum(-1) {
 }
 
@@ -488,7 +491,7 @@
 }
 
 void ACodecBufferChannel::pollForRenderedBuffers() {
-    // TODO(b/266211548): Poll the native window for rendered buffers.
+    mPollForRenderedBuffers->post();
 }
 
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 1a8597f..998ffce 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -79,6 +79,7 @@
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/RenderedFrameInfo.h>
 #include <media/stagefright/SurfaceUtils.h>
 #include <nativeloader/dlext_namespaces.h>
 #include <private/android_filesystem_config.h>
@@ -92,6 +93,7 @@
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
 using aidl::android::media::ClientInfoParcel;
+using server_configurable_flags::GetServerConfigurableFlag;
 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
 
@@ -209,6 +211,7 @@
 // Render metrics
 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
@@ -282,6 +285,11 @@
     return (err == NO_MEMORY);
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 static const int kMaxRetry = 2;
 static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
 static const int kNumBuffersAlign = 16;
@@ -750,7 +758,7 @@
             const sp<AMessage> &outputFormat) override;
     virtual void onInputSurfaceDeclined(status_t err) override;
     virtual void onSignaledInputEOS(status_t err) override;
-    virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
+    virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
     virtual void onOutputBuffersChanged() override;
     virtual void onFirstTunnelFrameReady() override;
 private:
@@ -859,7 +867,7 @@
     notify->post();
 }
 
-void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
+void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
     sp<AMessage> notify(mNotify->dup());
     notify->setInt32("what", kWhatOutputFramesRendered);
     if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
@@ -1025,9 +1033,10 @@
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
       mIsSurfaceToDisplay(false),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
       mVideoRenderQualityTracker(
               VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
-                      server_configurable_flags::GetServerConfigurableFlag)),
+                      GetServerConfigurableFlag)),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -1167,6 +1176,7 @@
         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
         if (m.frameReleasedCount > 0) {
             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
@@ -5953,12 +5963,10 @@
     return onQueueInputBuffer(msg);
 }
 
-//static
-size_t MediaCodec::CreateFramesRenderedMessage(
-        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+template<typename T>
+static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
     size_t index = 0;
-    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
-            it != done.cend(); ++it) {
+    for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
         if (it->getRenderTimeNs() < 0) {
             continue; // dropped frame from tracking
         }
@@ -5969,6 +5977,18 @@
     return index;
 }
 
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
+//static
+size_t MediaCodec::CreateFramesRenderedMessage(
+        const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
+    return CreateFramesRenderedMessageInternal(done, msg);
+}
+
 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
     size_t index;
     CHECK(msg->findSize("index", &index));
@@ -6044,7 +6064,7 @@
 
         // If rendering to the screen, then schedule a time in the future to poll to see if this
         // frame was ever rendered to seed onFrameRendered callbacks.
-        if (mIsSurfaceToDisplay) {
+        if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
             if (mediaTimeUs != INT64_MIN) {
                 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
                              : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
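
The MediaCodec.cpp hunks above fold the two CreateFramesRenderedMessage overloads into one duck-typed helper, CreateFramesRenderedMessageInternal, that only needs getMediaTimeUs()/getRenderTimeNs() from its element type. A minimal standalone sketch of the same idea, with a stand-in frame type and a plain vector in place of the AMessage payload; everything below is illustrative, not the AOSP implementation.

// Standalone sketch of the duck-typed helper shared by both CreateFramesRenderedMessage
// overloads. RenderedFrame stands in for RenderedFrameInfo / FrameRenderTracker::Info and only
// models the two getters the template relies on.
#include <cstdint>
#include <iostream>
#include <list>
#include <utility>
#include <vector>

struct RenderedFrame {
    int64_t mediaTimeUs;
    int64_t renderTimeNs;                       // < 0 means the frame was dropped from tracking
    int64_t getMediaTimeUs() const { return mediaTimeUs; }
    int64_t getRenderTimeNs() const { return renderTimeNs; }
};

// Any T that exposes getMediaTimeUs()/getRenderTimeNs() works, which is what lets two public
// overloads with different element types share a single implementation.
template <typename T>
size_t collectRenderedFrames(const std::list<T>& done,
                             std::vector<std::pair<int64_t, int64_t>>& out) {
    size_t count = 0;
    for (const T& info : done) {
        if (info.getRenderTimeNs() < 0) {
            continue;                           // dropped frame, not reported to the client
        }
        out.emplace_back(info.getMediaTimeUs(), info.getRenderTimeNs());
        ++count;
    }
    return count;
}

int main() {
    const std::list<RenderedFrame> done{{1000, 5000000}, {2000, -1}, {3000, 9000000}};
    std::vector<std::pair<int64_t, int64_t>> rendered;
    std::cout << collectRenderedFrames(done, rendered) << " frames rendered\n";  // prints 2
    return 0;
}
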
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index 4f12a37..e920bd1 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -154,7 +154,7 @@
 }
 
 VideoRenderQualityTracker::Configuration::Configuration() {
-    enabled = true;
+    enabled = false;
 
     // Assume that the app is skipping frames because it's detected that the frame couldn't be
     // rendered in time.
@@ -455,6 +455,8 @@
     if (mMetrics.firstRenderTimeUs == 0) {
         mMetrics.firstRenderTimeUs = actualRenderTimeUs;
     }
+    // Capture the timestamp at which the last frame was rendered
+    mMetrics.lastRenderTimeUs = actualRenderTimeUs;
 
     mMetrics.frameRenderedCount++;
 
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 903280f..a464504 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -29,6 +29,7 @@
 #include <media/IOMX.h>
 
 namespace android {
+struct ACodec;
 namespace hardware {
 class HidlMemory;
 };
@@ -63,7 +64,8 @@
     };
 
     ACodecBufferChannel(
-            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained);
+            const sp<AMessage> &inputBufferFilled, const sp<AMessage> &outputBufferDrained,
+            const sp<AMessage> &pollForRenderedBuffers);
     virtual ~ACodecBufferChannel();
 
     // BufferChannelBase interface
@@ -138,6 +140,7 @@
 
     const sp<AMessage> mInputBufferFilled;
     const sp<AMessage> mOutputBufferDrained;
+    const sp<AMessage> mPollForRenderedBuffers;
 
     sp<MemoryDealer> mDealer;
     sp<IMemory> mDecryptDestination;
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index 08c7917..49b2425 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -19,7 +19,7 @@
 
 #include <set>
 #include <stdint.h>
-#include <list>
+#include <deque>
 #include <vector>
 #include <android/native_window.h>
 #include <media/hardware/MetadataBufferType.h>
@@ -27,9 +27,9 @@
 #include <media/IOMX.h>
 #include <media/stagefright/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
-#include <media/stagefright/FrameRenderTracker.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/SkipCutBuffer.h>
+#include <ui/GraphicBuffer.h>
 #include <utils/NativeHandle.h>
 #include <OMX_Audio.h>
 #include <hardware/gralloc.h>
@@ -156,6 +156,7 @@
         kWhatForceStateTransition    = 'fstt',
         kWhatCheckIfStuck            = 'Cstk',
         kWhatSubmitExtraOutputMetadataBuffer = 'sbxo',
+        kWhatPollForRenderedBuffers  = 'pfrb',
     };
 
     enum {
@@ -177,6 +178,13 @@
                             | static_cast<uint64_t>(BufferUsage::VIDEO_DECODER),
     };
 
+    struct TrackedFrame {
+        int64_t id;
+        int64_t mediaTimeUs;
+        int64_t desiredRenderTimeNs;
+        nsecs_t renderTimeNs;
+    };
+
     struct BufferInfo {
         enum Status {
             OWNED_BY_US,
@@ -204,7 +212,6 @@
         sp<GraphicBuffer> mGraphicBuffer;
         bool mNewGraphicBuffer;
         int mFenceFd;
-        FrameRenderTracker::Info *mRenderInfo;
 
         // The following field and 4 methods are used for debugging only
         bool mIsReadFence;
@@ -251,6 +258,11 @@
     int32_t mNodeGeneration;
     sp<TAllocator> mAllocator[2];
 
+    std::deque<TrackedFrame> mTrackedFrames; // render information for buffers sent to a window
+    bool mAreRenderMetricsEnabled;
+    bool mIsWindowToDisplay;
+    bool mHasPresentFenceTimes;
+
     bool mUsingNativeWindow;
     sp<ANativeWindow> mNativeWindow;
     int mNativeWindowUsageBits;
@@ -267,7 +279,6 @@
     // format updates. This will equal to mOutputFormat until the first actual frame is received.
     sp<AMessage> mBaseOutputFormat;
 
-    FrameRenderTracker mRenderTracker; // render information for buffers rendered by ACodec
     std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
@@ -349,6 +360,10 @@
     status_t freeOutputBuffersNotOwnedByComponent();
     BufferInfo *dequeueBufferFromNativeWindow();
 
+    void initializeFrameTracking();
+    void trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs);
+    void pollForRenderedFrames();
+
     inline bool storingMetadataInDecodedBuffers() {
         return (mPortMode[kPortIndexOutput] == IOMX::kPortModeDynamicANWBuffer) && !mIsEncoder;
     }
@@ -571,21 +586,6 @@
     void processDeferredMessages();
 
     void onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);
-    // called when we have dequeued a buffer |buf| from the native window to track render info.
-    // |fenceFd| is the dequeue fence, and |info| points to the buffer info where this buffer is
-    // stored.
-    void updateRenderInfoForDequeuedBuffer(
-            ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info);
-
-    // Checks to see if any frames have rendered up until |until|, and to notify client
-    // (MediaCodec) of rendered frames up-until the frame pointed to by |until| or the first
-    // unrendered frame. These frames are removed from the render queue.
-    // If |dropIncomplete| is true, unrendered frames up-until |until| will be dropped from the
-    // queue, allowing all rendered framed up till then to be notified of.
-    // (This will effectively clear the render queue up-until (and including) |until|.)
-    // If |until| is NULL, or is not in the rendered queue, this method will check all frames.
-    void notifyOfRenderedFrames(
-            bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
 
     void onFirstTunnelFrameReady();
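
The ACodec.h change above drops FrameRenderTracker from ACodec in favour of a std::deque<TrackedFrame> that records frames released to the native window and later pairs them with render timestamps in pollForRenderedFrames(). The matching logic itself lives in ACodec.cpp, which is not part of this diff; the sketch below is a standalone illustration under the assumption of id-based matching, with illustrative names throughout.

// Standalone sketch of a deque-based frame tracker in the spirit of ACodec::mTrackedFrames.
// How ACodec really pairs frames with presentation times is in ACodec.cpp (not shown in this
// diff); the id-based matching below is an assumption for illustration only.
#include <cstdint>
#include <deque>
#include <iostream>
#include <vector>

struct TrackedFrame {
    int64_t id;                     // native-window frame id
    int64_t mediaTimeUs;            // presentation timestamp of the frame
    int64_t desiredRenderTimeNs;    // when the frame was asked to be rendered
    int64_t renderTimeNs;           // filled in once the window reports the frame as presented
};

class FrameTracker {
public:
    void trackReleasedFrame(int64_t id, int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
        mTrackedFrames.push_back({id, mediaTimeUs, desiredRenderTimeNs, /*renderTimeNs=*/-1});
    }

    // Pretend the native window reported frame |id| as presented at |renderTimeNs|; drain and
    // return every tracked frame up to and including it, oldest first.
    std::vector<TrackedFrame> pollForRenderedFrames(int64_t id, int64_t renderTimeNs) {
        std::vector<TrackedFrame> rendered;
        while (!mTrackedFrames.empty() && mTrackedFrames.front().id <= id) {
            TrackedFrame frame = mTrackedFrames.front();
            mTrackedFrames.pop_front();
            if (frame.id == id) {
                frame.renderTimeNs = renderTimeNs;
            }
            rendered.push_back(frame);
        }
        return rendered;
    }

private:
    std::deque<TrackedFrame> mTrackedFrames;
};

int main() {
    FrameTracker tracker;
    tracker.trackReleasedFrame(/*id=*/1, /*mediaTimeUs=*/0, /*desiredRenderTimeNs=*/16666666);
    tracker.trackReleasedFrame(/*id=*/2, /*mediaTimeUs=*/33333, /*desiredRenderTimeNs=*/33333333);
    for (const TrackedFrame& f : tracker.pollForRenderedFrames(/*id=*/2, /*renderTimeNs=*/40000000)) {
        std::cout << "frame " << f.id << " renderTimeNs=" << f.renderTimeNs << "\n";
    }
    return 0;
}
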
 
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index 916d41e..90347f9 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -41,7 +41,7 @@
 struct BufferProducerWrapper;
 class MediaCodecBuffer;
 struct PersistentSurface;
-struct RenderedFrameInfo;
+class RenderedFrameInfo;
 class Surface;
 struct ICrypto;
 class IMemory;
diff --git a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
index c14755a..cab7ecc 100644
--- a/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
+++ b/media/libstagefright/include/media/stagefright/FrameRenderTracker.h
@@ -32,61 +32,59 @@
 
 namespace android {
 
-// Tracks the render information about a frame. Frames go through several states while
-// the render information is tracked:
-//
-// 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
-// queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
-// Key characteristics: mFence is not NULL and mIndex is negative.
-//
-// 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
-// Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
-// invalid.
-//
-// 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
-// Key characteristics: mFence is NULL.
-//
-struct RenderedFrameInfo {
-    // set by client during onFrameQueued or onFrameRendered
-    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
-
-    // -1 if frame is not yet rendered
-    nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
-
-    // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
-    ssize_t getIndex() const        { return mIndex; }
-
-    // creates information for a queued frame
-    RenderedFrameInfo(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
-            const sp<Fence> &fence)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(-1),
-          mIndex(-1),
-          mGraphicBuffer(graphicBuffer),
-          mFence(fence) {
-    }
-
-    // creates information for a frame rendered on a tunneled surface
-    RenderedFrameInfo(int64_t mediaTimeUs, nsecs_t renderTimeNs)
-        : mMediaTimeUs(mediaTimeUs),
-          mRenderTimeNs(renderTimeNs),
-          mIndex(-1),
-          mGraphicBuffer(NULL),
-          mFence(NULL) {
-    }
-
-private:
-    int64_t mMediaTimeUs;
-    nsecs_t mRenderTimeNs;
-    ssize_t mIndex;         // to be used by client
-    sp<GraphicBuffer> mGraphicBuffer;
-    sp<Fence> mFence;
-
-    friend struct FrameRenderTracker;
-};
-
 struct FrameRenderTracker {
-    typedef RenderedFrameInfo Info;
+    // Tracks the render information about a frame. Frames go through several states while
+    // the render information is tracked:
+    //
+    // 1. queued frame: mMediaTime and mGraphicBuffer are set for the frame. mFence is the
+    // queue fence (read fence). mIndex is negative, and mRenderTimeNs is invalid.
+    // Key characteristics: mFence is not NULL and mIndex is negative.
+    //
+    // 2. dequeued frame: mFence is updated with the dequeue fence (write fence). mIndex is set.
+    // Key characteristics: mFence is not NULL and mIndex is non-negative. mRenderTime is still
+    // invalid.
+    //
+    // 3. rendered frame or frame: mFence is cleared, mRenderTimeNs is set.
+    // Key characteristics: mFence is NULL.
+    //
+    struct Info {
+        // set by client during onFrameQueued or onFrameRendered
+        int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+
+        // -1 if frame is not yet rendered
+        nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+        // set by client during updateRenderInfoForDequeuedBuffer; -1 otherwise
+        ssize_t getIndex() const        { return mIndex; }
+
+        // creates information for a queued frame
+        Info(int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer,
+                const sp<Fence> &fence)
+          : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(-1),
+            mIndex(-1),
+            mGraphicBuffer(graphicBuffer),
+            mFence(fence) {
+        }
+
+        // creates information for a frame rendered on a tunneled surface
+        Info(int64_t mediaTimeUs, nsecs_t renderTimeNs)
+            : mMediaTimeUs(mediaTimeUs),
+            mRenderTimeNs(renderTimeNs),
+            mIndex(-1),
+            mGraphicBuffer(NULL),
+            mFence(NULL) {
+        }
+
+    private:
+        int64_t mMediaTimeUs;
+        nsecs_t mRenderTimeNs;
+        ssize_t mIndex;         // to be used by client
+        sp<GraphicBuffer> mGraphicBuffer;
+        sp<Fence> mFence;
+
+        friend struct FrameRenderTracker;
+    };
 
     FrameRenderTracker();
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 52d7d3d..36084db 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -64,6 +64,7 @@
 class MediaCodecBuffer;
 class IMemory;
 struct PersistentSurface;
+class RenderedFrameInfo;
 class SoftwareRenderer;
 class Surface;
 namespace hardware {
@@ -281,6 +282,8 @@
     // by adding rendered frame information to a base notification message. Returns the number
     // of frames that were rendered.
     static size_t CreateFramesRenderedMessage(
+            const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg);
+    static size_t CreateFramesRenderedMessage(
             const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg);
 
     static status_t CanFetchLinearBlock(
@@ -572,6 +575,7 @@
     sp<ALooper> mCryptoLooper;
 
     bool mIsSurfaceToDisplay;
+    bool mAreRenderMetricsEnabled;
     PlaybackDurationAccumulator mPlaybackDurationAccumulator;
     VideoRenderQualityTracker mVideoRenderQualityTracker;
 
diff --git a/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
new file mode 100644
index 0000000..4b8a58d
--- /dev/null
+++ b/media/libstagefright/include/media/stagefright/RenderedFrameInfo.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RENDERED_FRAME_INFO_H
+#define RENDERED_FRAME_INFO_H
+
+namespace android {
+
+class RenderedFrameInfo {
+public:
+    RenderedFrameInfo(int64_t mediaTimeUs, int64_t renderTimeNs)
+        : mMediaTimeUs(mediaTimeUs), mRenderTimeNs(renderTimeNs) {}
+
+    int64_t getMediaTimeUs() const  { return mMediaTimeUs; }
+    nsecs_t getRenderTimeNs() const { return mRenderTimeNs; }
+
+private:
+    int64_t mMediaTimeUs;
+    nsecs_t mRenderTimeNs;
+};
+
+}  // namespace android
+
+#endif // RENDERED_FRAME_INFO_H
\ No newline at end of file
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index 82ba81c..a656e6e 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -38,6 +38,9 @@
     // The render time of the first video frame.
     int64_t firstRenderTimeUs;
 
+    // The render time of the last video frame.
+    int64_t lastRenderTimeUs;
+
     // The number of frames released to be rendered.
     int64_t frameReleasedCount;
 
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 39f772b..3af8828 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -78,30 +78,8 @@
 
 void AudioFlinger::MelReporter::onFirstRef() {
     mAudioFlinger.mPatchCommandThread->addListener(this);
-}
 
-bool AudioFlinger::MelReporter::shouldComputeMelForDeviceType(audio_devices_t device) {
-    if (!mSoundDoseManager->isCsdEnabled()) {
-        ALOGV("%s csd is disabled", __func__);
-        return false;
-    }
-    if (mSoundDoseManager->forceComputeCsdOnAllDevices()) {
-        return true;
-    }
-
-    switch (device) {
-        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
-        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
-        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
-        case AUDIO_DEVICE_OUT_USB_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_HEADSET:
-        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
-            return true;
-        default:
-            return false;
-    }
+    mSoundDoseManager = sp<SoundDoseManager>::make(sp<IMelReporterCallback>::fromExisting(this));
 }
 
 void AudioFlinger::MelReporter::updateMetadataForCsd(audio_io_handle_t streamHandle,
@@ -127,16 +105,17 @@
     }
 
     auto activeMelPatchIt = mActiveMelPatches.find(activeMelPatchId.value());
-    if (activeMelPatchIt != mActiveMelPatches.end()
-        && shouldActivateCsd != activeMelPatchIt->second.csdActive) {
-        if (activeMelPatchIt->second.csdActive) {
-            ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
-            stopMelComputationForPatch_l(activeMelPatchIt->second);
-        } else {
-            ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
-            startMelComputationForActivePatch_l(activeMelPatchIt->second);
+    if (activeMelPatchIt != mActiveMelPatches.end()) {
+        if (shouldActivateCsd != activeMelPatchIt->second.csdActive) {
+            if (activeMelPatchIt->second.csdActive) {
+                ALOGV("%s should not compute CSD for stream handle %d", __func__, streamHandle);
+                stopMelComputationForPatch_l(activeMelPatchIt->second);
+            } else {
+                ALOGV("%s should compute CSD for stream handle %d", __func__, streamHandle);
+                startMelComputationForActivePatch_l(activeMelPatchIt->second);
+            }
+            activeMelPatchIt->second.csdActive = shouldActivateCsd;
         }
-        activeMelPatchIt->second.csdActive = shouldActivateCsd;
     }
 }
 
@@ -159,23 +138,28 @@
     audio_io_handle_t streamHandle = patch.mAudioPatch.sources[0].ext.mix.handle;
     ActiveMelPatch newPatch;
     newPatch.streamHandle = streamHandle;
+    newPatch.csdActive = false;
     for (size_t i = 0; i < patch.mAudioPatch.num_sinks; ++i) {
-        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE
-            && shouldComputeMelForDeviceType(patch.mAudioPatch.sinks[i].ext.device.type)) {
+        if (patch.mAudioPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE &&
+                mSoundDoseManager->shouldComputeCsdForDeviceType(
+                        patch.mAudioPatch.sinks[i].ext.device.type)) {
             audio_port_handle_t deviceId = patch.mAudioPatch.sinks[i].id;
-            newPatch.deviceHandles.push_back(deviceId);
+            bool shouldComputeCsd = mSoundDoseManager->shouldComputeCsdForDeviceWithAddress(
+                    patch.mAudioPatch.sinks[i].ext.device.type,
+                    patch.mAudioPatch.sinks[i].ext.device.address);
+            newPatch.deviceStates.push_back({deviceId, shouldComputeCsd});
+            newPatch.csdActive |= shouldComputeCsd;
             AudioDeviceTypeAddr adt{patch.mAudioPatch.sinks[i].ext.device.type,
                                     patch.mAudioPatch.sinks[i].ext.device.address};
             mSoundDoseManager->mapAddressToDeviceId(adt, deviceId);
         }
     }
 
-    if (!newPatch.deviceHandles.empty()) {
+    if (!newPatch.deviceStates.empty() && newPatch.csdActive) {
         std::lock_guard _afl(mAudioFlinger.mLock);
         std::lock_guard _l(mLock);
         ALOGV("%s add patch handle %d to active devices", __func__, handle);
         startMelComputationForActivePatch_l(newPatch);
-        newPatch.csdActive = true;
         mActiveMelPatches[handle] = newPatch;
     }
 }
@@ -189,18 +173,41 @@
         return;
     }
 
-    for (const auto& deviceHandle : patch.deviceHandles) {
-        ++mActiveDevices[deviceHandle];
-        ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
-              patch.streamHandle, deviceHandle, mActiveDevices[deviceHandle]);
+    for (const auto& device : patch.deviceStates) {
+        if (device.second) {
+            ++mActiveDevices[device.first];
+            ALOGI("%s add stream %d that uses device %d for CSD, nr of streams: %d", __func__,
+                  patch.streamHandle, device.first, mActiveDevices[device.first]);
 
-        if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
-            outputThread->startMelComputation_l(mSoundDoseManager->getOrCreateProcessorForDevice(
-                deviceHandle,
-                patch.streamHandle,
-                outputThread->mSampleRate,
-                outputThread->mChannelCount,
-                outputThread->mFormat));
+            if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
+                outputThread->startMelComputation_l(
+                        mSoundDoseManager->getOrCreateProcessorForDevice(
+                                device.first,
+                                patch.streamHandle,
+                                outputThread->mSampleRate,
+                                outputThread->mChannelCount,
+                                outputThread->mFormat));
+            }
+        }
+    }
+}
+
+void AudioFlinger::MelReporter::startMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    std::lock_guard _laf(mAudioFlinger.mLock);
+    std::lock_guard _l(mLock);
+
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && !device.second) {
+                device.second = true;
+            }
+            csdActive |= device.second;
+        }
+        if (csdActive && !activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            startMelComputationForActivePatch_l(activeMelPatch.second);
         }
     }
 }
@@ -228,7 +235,11 @@
 
     std::lock_guard _afl(mAudioFlinger.mLock);
     std::lock_guard _l(mLock);
-    stopMelComputationForPatch_l(melPatch);
+    if (melPatch.csdActive) {
+        // only need to stop if patch was active
+        melPatch.csdActive = false;
+        stopMelComputationForPatch_l(melPatch);
+    }
 }
 
 sp<media::ISoundDose> AudioFlinger::MelReporter::getSoundDoseInterface(
@@ -247,30 +258,47 @@
 void AudioFlinger::MelReporter::stopMelComputationForPatch_l(const ActiveMelPatch& patch)
 NO_THREAD_SAFETY_ANALYSIS  // access of AudioFlinger::checkOutputThread_l
 {
-    if (!patch.csdActive) {
-        // no need to stop CSD inactive patches
-        return;
-    }
-
     auto outputThread = mAudioFlinger.checkOutputThread_l(patch.streamHandle);
 
     ALOGV("%s: stop MEL for stream id: %d", __func__, patch.streamHandle);
-    for (const auto& deviceId : patch.deviceHandles) {
-        if (mActiveDevices[deviceId] > 0) {
-            --mActiveDevices[deviceId];
-            if (mActiveDevices[deviceId] == 0) {
+    for (const auto& device : patch.deviceStates) {
+        if (mActiveDevices[device.first] > 0) {
+            --mActiveDevices[device.first];
+            if (mActiveDevices[device.first] == 0) {
                 // no stream is using deviceId anymore
-                ALOGI("%s removing device %d from active CSD devices", __func__, deviceId);
-                mSoundDoseManager->clearMapDeviceIdEntries(deviceId);
+                ALOGI("%s removing device %d from active CSD devices", __func__, device.first);
+                mSoundDoseManager->clearMapDeviceIdEntries(device.first);
             }
         }
     }
 
+    mSoundDoseManager->removeStreamProcessor(patch.streamHandle);
     if (outputThread != nullptr && !useHalSoundDoseInterface_l()) {
         outputThread->stopMelComputation_l();
     }
 }
 
+void AudioFlinger::MelReporter::stopMelComputationForDeviceId(audio_port_handle_t deviceId) {
+    ALOGV("%s(%d)", __func__, deviceId);
+    std::lock_guard _laf(mAudioFlinger.mLock);
+    std::lock_guard _l(mLock);
+
+    for (auto& activeMelPatch : mActiveMelPatches) {
+        bool csdActive = false;
+        for (auto& device: activeMelPatch.second.deviceStates) {
+            if (device.first == deviceId && device.second) {
+                device.second = false;
+            }
+            csdActive |= device.second;
+        }
+
+        if (!csdActive && activeMelPatch.second.csdActive) {
+            activeMelPatch.second.csdActive = csdActive;
+            stopMelComputationForPatch_l(activeMelPatch.second);
+        }
+    }
+
+}
 
 std::optional<audio_patch_handle_t> AudioFlinger::MelReporter::activePatchStreamHandle_l(
         audio_io_handle_t streamHandle) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 2bc33f2..604046e 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -29,11 +29,11 @@
  * Class for listening to new patches and starting the MEL computation. MelReporter is
  * concealed within AudioFlinger, their lifetimes are the same.
  */
-class MelReporter : public PatchCommandThread::PatchCommandListener {
+class MelReporter : public PatchCommandThread::PatchCommandListener,
+                    public IMelReporterCallback {
 public:
     explicit MelReporter(AudioFlinger& audioFlinger)
-        : mAudioFlinger(audioFlinger),
-          mSoundDoseManager(sp<SoundDoseManager>::make()) {}
+        : mAudioFlinger(audioFlinger) {}
 
     void onFirstRef() override;
 
@@ -65,6 +65,10 @@
 
     std::string dump();
 
+    // IMelReporterCallback methods
+    void stopMelComputationForDeviceId(audio_port_handle_t deviceId) override;
+    void startMelComputationForDeviceId(audio_port_handle_t deviceId) override;
+
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
                             const PatchPanel::Patch& patch) override;
@@ -80,13 +84,15 @@
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
-        std::vector<audio_port_handle_t> deviceHandles;
+        /**
+         * Stores device ids and whether they are compatible for CSD calculation.
+         * The boolean value can change since BT audio device types are user-configurable
+         * to headphones/headsets or other device types.
+         */
+        std::vector<std::pair<audio_port_handle_t, bool>> deviceStates;
         bool csdActive;
     };
 
-    /** Returns true if we should compute MEL for the given device. */
-    bool shouldComputeMelForDeviceType(audio_devices_t device);
-
     void stopInternalMelComputation();
 
     /** Should be called with the following order of locks: mAudioFlinger.mLock -> mLock. */
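
The MelReporter change above replaces the flat deviceHandles vector with per-device (id, CSD-eligible) pairs and re-derives the patch-level csdActive flag whenever a single device changes state, which is what startMelComputationForDeviceId()/stopMelComputationForDeviceId() do in MelReporter.cpp. A standalone sketch of that derivation, with stand-in types rather than the AudioFlinger classes.

// Standalone sketch of re-deriving a patch-level "CSD active" flag from per-device states,
// mirroring the deviceStates vector added to MelReporter::ActiveMelPatch. Types and names are
// illustrative stand-ins, not the AudioFlinger implementation.
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

using PortHandle = int32_t;

struct ActiveMelPatch {
    int32_t streamHandle = -1;
    // (device id, does this device currently qualify for CSD computation)
    std::vector<std::pair<PortHandle, bool>> deviceStates;
    bool csdActive = false;
};

// Mark one device as (in)eligible and report whether the patch as a whole changed state.
bool setDeviceCsdEligible(ActiveMelPatch& patch, PortHandle deviceId, bool eligible) {
    bool csdActive = false;
    for (auto& device : patch.deviceStates) {
        if (device.first == deviceId) {
            device.second = eligible;
        }
        csdActive |= device.second;     // the patch stays active while any device qualifies
    }
    const bool changed = (csdActive != patch.csdActive);
    patch.csdActive = csdActive;
    return changed;                     // a caller would start/stop MEL computation on a change
}

int main() {
    ActiveMelPatch patch;
    patch.streamHandle = 13;
    patch.deviceStates = {{1, true}, {2, false}};
    patch.csdActive = true;

    std::cout << std::boolalpha;
    std::cout << setDeviceCsdEligible(patch, 1, false) << "\n";  // true: patch became inactive
    std::cout << setDeviceCsdEligible(patch, 2, true) << "\n";   // true: patch became active again
    return 0;
}
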
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0f4bfa4..b022a31 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -10614,14 +10614,24 @@
         AudioHwDevice *hwDev,  AudioStreamOut *output, bool systemReady)
     : MmapThread(audioFlinger, id, hwDev, output->stream, systemReady, true /* isOut */),
       mStreamType(AUDIO_STREAM_MUSIC),
-      mStreamVolume(1.0),
-      mStreamMute(false),
       mOutput(output)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = audioFlinger->masterVolume_l();
     mMasterMute = audioFlinger->masterMute_l();
+
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+        mStreamTypes[stream].volume = 0.0f;
+        mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
+    }
+    // Audio patch and call assistant volume are always max
+    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
+
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -10678,8 +10688,8 @@
 void AudioFlinger::MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
 {
     Mutex::Autolock _l(mLock);
+    mStreamTypes[stream].volume = value;
     if (stream == mStreamType) {
-        mStreamVolume = value;
         broadcast_l();
     }
 }
@@ -10687,17 +10697,14 @@
 float AudioFlinger::MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
 {
     Mutex::Autolock _l(mLock);
-    if (stream == mStreamType) {
-        return mStreamVolume;
-    }
-    return 0.0f;
+    return mStreamTypes[stream].volume;
 }
 
 void AudioFlinger::MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     Mutex::Autolock _l(mLock);
+    mStreamTypes[stream].mute = muted;
     if (stream == mStreamType) {
-        mStreamMute= muted;
         broadcast_l();
     }
 }
@@ -10737,14 +10744,13 @@
 {
     float volume;
 
-    if (mMasterMute || mStreamMute) {
+    if (mMasterMute || streamMuted_l()) {
         volume = 0;
     } else {
-        volume = mMasterVolume * mStreamVolume;
+        volume = mMasterVolume * streamVolume_l();
     }
 
     if (volume != mHalVolFloat) {
-
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
 
@@ -10778,8 +10784,8 @@
             track->setMetadataHasChanged();
             track->processMuteEvent_l(mAudioFlinger->getOrCreateAudioManager(),
                 /*muteState=*/{mMasterMute,
-                               mStreamVolume == 0.f,
-                               mStreamMute,
+                               streamVolume_l() == 0.f,
+                               streamMuted_l(),
                                // TODO(b/241533526): adjust logic to include mute from AppOps
                                false /*muteFromPlaybackRestricted*/,
                                false /*muteFromClientVolume*/,
@@ -10892,7 +10898,7 @@
     MmapThread::dumpInternals_l(fd, args);
 
     dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
+            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 7d16a0b..b5332e1 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -2313,12 +2313,17 @@
 
 protected:
                 void        dumpInternals_l(int fd, const Vector<String16>& args) override;
+                float       streamVolume_l() const {
+                    return mStreamTypes[mStreamType].volume;
+                }
+                bool        streamMuted_l() const {
+                    return mStreamTypes[mStreamType].mute;
+                }
 
+                stream_type_t               mStreamTypes[AUDIO_STREAM_CNT];
                 audio_stream_type_t         mStreamType;
                 float                       mMasterVolume;
-                float                       mStreamVolume;
                 bool                        mMasterMute;
-                bool                        mStreamMute;
                 AudioStreamOut*             mOutput;
 
                 mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
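
The Threads.cpp/Threads.h changes above replace MmapPlaybackThread's single mStreamVolume/mStreamMute pair with a per-stream-type table, so setStreamVolume()/setStreamMute() record state for any stream while the HAL volume is still derived from the thread's own mStreamType through streamVolume_l()/streamMuted_l(). A standalone sketch of that lookup, with an illustrative enum in place of audio_stream_type_t and none of the AudioFlinger locking.

// Standalone sketch of the per-stream volume table that replaces the single
// mStreamVolume/mStreamMute pair. The enum and class are illustrative stand-ins; the real code
// indexes stream_type_t mStreamTypes[AUDIO_STREAM_CNT] under the thread lock.
#include <array>
#include <cstddef>
#include <iostream>

enum StreamType : std::size_t { kMusic = 0, kVoiceCall, kPatch, kStreamCount };

struct StreamState {
    float volume = 0.0f;
    bool mute = false;
};

class MmapPlaybackSketch {
public:
    explicit MmapPlaybackSketch(StreamType streamType) : mStreamType(streamType) {
        mStreamTypes[kPatch].volume = 1.0f;         // patch volume stays at max, as in the diff
    }
    // State is now recorded for every stream type, not only the thread's own one.
    void setStreamVolume(StreamType stream, float value) { mStreamTypes[stream].volume = value; }
    void setStreamMute(StreamType stream, bool muted) { mStreamTypes[stream].mute = muted; }

    // The volume sent to the HAL still depends only on the thread's configured stream type.
    float halVolume(float masterVolume, bool masterMute) const {
        if (masterMute || streamMuted()) {
            return 0.0f;
        }
        return masterVolume * streamVolume();
    }

private:
    float streamVolume() const { return mStreamTypes[mStreamType].volume; }
    bool streamMuted() const { return mStreamTypes[mStreamType].mute; }

    std::array<StreamState, kStreamCount> mStreamTypes{};
    const StreamType mStreamType;
};

int main() {
    MmapPlaybackSketch thread(kMusic);
    thread.setStreamVolume(kMusic, 0.5f);
    thread.setStreamVolume(kVoiceCall, 0.8f);       // recorded even though it is not mStreamType
    std::cout << thread.halVolume(/*masterVolume=*/1.0f, /*masterMute=*/false) << "\n";  // 0.5
    return 0;
}
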
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index a114a38..a551857 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -299,6 +299,25 @@
     return binder::Status::ok();
 }
 
+binder::Status SoundDoseManager::SoundDose::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& btDeviceCategories) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->initCachedAudioDeviceCategories(btDeviceCategories);
+    }
+    return binder::Status::ok();
+}
+binder::Status SoundDoseManager::SoundDose::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& btAudioDevice) {
+    ALOGV("%s", __func__);
+    auto soundDoseManager = mSoundDoseManager.promote();
+    if (soundDoseManager != nullptr) {
+        soundDoseManager->setAudioDeviceCategory(btAudioDevice);
+    }
+    return binder::Status::ok();
+}
+
 binder::Status SoundDoseManager::SoundDose::getOutputRs2UpperBound(float* value) {
     ALOGV("%s", __func__);
     auto soundDoseManager = mSoundDoseManager.promote();
@@ -356,7 +375,9 @@
         auto melProcessor = mp.second.promote();
         if (melProcessor != nullptr) {
             auto deviceId = melProcessor->getDeviceId();
-            if (mActiveDeviceTypes[deviceId] == deviceType) {
+            const auto deviceTypeIt = mActiveDeviceTypes.find(deviceId);
+            if (deviceTypeIt != mActiveDeviceTypes.end() &&
+                deviceTypeIt->second == deviceType) {
                 ALOGV("%s: set attenuation for deviceId %d to %f",
                         __func__, deviceId, attenuationDB);
                 melProcessor->setAttenuation(attenuationDB);
@@ -388,6 +409,103 @@
     return mEnabledCsd;
 }
 
+void SoundDoseManager::initCachedAudioDeviceCategories(
+        const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories) {
+    ALOGV("%s", __func__);
+    {
+        const std::lock_guard _l(mLock);
+        mBluetoothDevicesWithCsd.clear();
+    }
+    for (const auto& btDeviceCategory : deviceCategories) {
+        setAudioDeviceCategory(btDeviceCategory);
+    }
+}
+
+void SoundDoseManager::setAudioDeviceCategory(
+        const media::ISoundDose::AudioDeviceCategory& audioDevice) {
+    ALOGV("%s: set BT audio device type with address %s to headphone %d", __func__,
+          audioDevice.address.c_str(), audioDevice.csdCompatible);
+
+    std::vector<audio_port_handle_t> devicesToStart;
+    std::vector<audio_port_handle_t> devicesToStop;
+    {
+        const std::lock_guard _l(mLock);
+        const auto deviceIt = mBluetoothDevicesWithCsd.find(
+                std::make_pair(audioDevice.address,
+                               static_cast<audio_devices_t>(audioDevice.internalAudioType)));
+        if (deviceIt != mBluetoothDevicesWithCsd.end()) {
+            deviceIt->second = audioDevice.csdCompatible;
+        } else {
+            mBluetoothDevicesWithCsd.emplace(
+                    std::make_pair(audioDevice.address,
+                                   static_cast<audio_devices_t>(audioDevice.internalAudioType)),
+                    audioDevice.csdCompatible);
+        }
+
+        for (const auto &activeDevice: mActiveDevices) {
+            if (activeDevice.first.address() == audioDevice.address &&
+                activeDevice.first.mType ==
+                static_cast<audio_devices_t>(audioDevice.internalAudioType)) {
+                if (audioDevice.csdCompatible) {
+                    devicesToStart.push_back(activeDevice.second);
+                } else {
+                    devicesToStop.push_back(activeDevice.second);
+                }
+            }
+        }
+    }
+
+    for (const auto& deviceToStart : devicesToStart) {
+        mMelReporterCallback->startMelComputationForDeviceId(deviceToStart);
+    }
+    for (const auto& deviceToStop : devicesToStop) {
+        mMelReporterCallback->stopMelComputationForDeviceId(deviceToStop);
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceType(audio_devices_t device) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
+    switch (device) {
+        case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+        case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
+        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+        case AUDIO_DEVICE_OUT_USB_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_HEADSET:
+        case AUDIO_DEVICE_OUT_BLE_BROADCAST:
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool SoundDoseManager::shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                                            const std::string& deviceAddress) {
+    if (!isCsdEnabled()) {
+        ALOGV("%s csd is disabled", __func__);
+        return false;
+    }
+    if (forceComputeCsdOnAllDevices()) {
+        return true;
+    }
+
+    if (!audio_is_ble_out_device(type) && !audio_is_a2dp_device(type)) {
+        return shouldComputeCsdForDeviceType(type);
+    }
+
+    const std::lock_guard _l(mLock);
+    const auto deviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(deviceAddress, type));
+    return deviceIt != mBluetoothDevicesWithCsd.end() && deviceIt->second;
+}
+
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
     // invalidate any HAL sound dose interface used
     setHalSoundDoseInterface(nullptr);
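
setAudioDeviceCategory() above decides which active devices to start or stop while holding mLock, then invokes the IMelReporterCallback methods only after the lock has been released, presumably so that MelReporter can take its own locks in those callbacks without risking lock-order issues. A standalone sketch of that collect-under-lock/notify-outside-lock pattern, with plain standard types and illustrative names.

// Standalone sketch of the lock discipline in SoundDoseManager::setAudioDeviceCategory():
// figure out which devices to start/stop while holding the internal lock, but run the
// callbacks after releasing it. CategoryUpdater and its members are illustrative names.
#include <functional>
#include <iostream>
#include <map>
#include <mutex>
#include <string>
#include <vector>

class CategoryUpdater {
public:
    CategoryUpdater(std::function<void(int)> startCb, std::function<void(int)> stopCb)
        : mStart(std::move(startCb)), mStop(std::move(stopCb)) {}

    void markActive(const std::string& address, int deviceId) {
        const std::lock_guard<std::mutex> l(mLock);
        mActiveDevices[address] = deviceId;
    }

    void setDeviceCompatible(const std::string& address, bool csdCompatible) {
        std::vector<int> devicesToStart;
        std::vector<int> devicesToStop;
        {
            const std::lock_guard<std::mutex> l(mLock);
            mCompatible[address] = csdCompatible;
            const auto it = mActiveDevices.find(address);
            if (it != mActiveDevices.end()) {
                (csdCompatible ? devicesToStart : devicesToStop).push_back(it->second);
            }
        }   // the lock is released before any callback runs
        for (int id : devicesToStart) mStart(id);
        for (int id : devicesToStop) mStop(id);
    }

private:
    std::mutex mLock;
    std::map<std::string, bool> mCompatible;
    std::map<std::string, int> mActiveDevices;
    const std::function<void(int)> mStart;
    const std::function<void(int)> mStop;
};

int main() {
    CategoryUpdater updater([](int id) { std::cout << "start MEL for device " << id << "\n"; },
                            [](int id) { std::cout << "stop MEL for device " << id << "\n"; });
    updater.markActive("00:11:22:33:44:55", /*deviceId=*/7);
    updater.setDeviceCompatible("00:11:22:33:44:55", /*csdCompatible=*/false);  // prints "stop ..."
    return 0;
}
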
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 6c02afb..718913d 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -32,6 +32,15 @@
 
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 
+class IMelReporterCallback : public virtual RefBase {
+public:
+    IMelReporterCallback() {};
+    virtual ~IMelReporterCallback() {};
+
+    virtual void stopMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+    virtual void startMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+};
+
 class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
 public:
     /** CSD is computed with a rolling window of 7 days. */
@@ -39,8 +48,9 @@
     /** Default RS2 upper bound in dBA as defined in IEC 62368-1 3rd edition. */
     static constexpr float kDefaultRs2UpperBound = 100.f;
 
-    SoundDoseManager()
-        : mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
+    explicit SoundDoseManager(const sp<IMelReporterCallback>& melReporterCallback)
+        : mMelReporterCallback(melReporterCallback),
+          mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
           mRs2UpperBound(kDefaultRs2UpperBound) {};
 
     /**
@@ -104,6 +114,21 @@
     /** Returns true if CSD is enabled. */
     bool isCsdEnabled();
 
+    void initCachedAudioDeviceCategories(
+            const std::vector<media::ISoundDose::AudioDeviceCategory>& deviceCategories);
+
+    void setAudioDeviceCategory(
+            const media::ISoundDose::AudioDeviceCategory& audioDevice);
+
+    /**
+     * Returns true if the type can compute CSD. For bluetooth devices we rely on whether we
+     * categorized the address as headphones/headsets, only in this case we return true.
+     */
+    bool shouldComputeCsdForDeviceWithAddress(const audio_devices_t type,
+                                              const std::string& deviceAddress);
+    /** Returns true for all device types which could support CSD computation. */
+    bool shouldComputeCsdForDeviceType(audio_devices_t device);
+
     std::string dump() const;
 
     // used for testing only
@@ -139,6 +164,13 @@
         binder::Status getOutputRs2UpperBound(float* value) override;
         binder::Status setCsdEnabled(bool enabled) override;
 
+        binder::Status initCachedAudioDeviceCategories(
+                const std::vector<media::ISoundDose::AudioDeviceCategory> &btDeviceCategories)
+                override;
+
+        binder::Status setAudioDeviceCategory(
+                const media::ISoundDose::AudioDeviceCategory& btAudioDevice) override;
+
         binder::Status getCsd(float* value) override;
         binder::Status forceUseFrameworkMel(bool useFrameworkMel) override;
         binder::Status forceComputeCsdOnAllDevices(bool computeCsdOnAllDevices) override;
@@ -179,6 +211,8 @@
 
     mutable std::mutex mLock;
 
+    const sp<IMelReporterCallback> mMelReporterCallback;
+
     // no need for lock since MelAggregator is thread-safe
     const sp<audio_utils::MelAggregator> mMelAggregator;
 
@@ -191,6 +225,17 @@
     std::map<AudioDeviceTypeAddr, audio_port_handle_t> mActiveDevices GUARDED_BY(mLock);
     std::unordered_map<audio_port_handle_t, audio_devices_t> mActiveDeviceTypes GUARDED_BY(mLock);
 
+    struct bt_device_type_hash {
+        std::size_t operator() (const std::pair<std::string, audio_devices_t> &deviceType) const {
+            return std::hash<std::string>()(deviceType.first) ^
+                   std::hash<audio_devices_t>()(deviceType.second);
+        }
+    };
+    // BT device categorization cached as received from the Java side; see
+    // SoundDoseManager::initCachedAudioDeviceCategories and setAudioDeviceCategory.
+    std::unordered_map<std::pair<std::string, audio_devices_t>, bool, bt_device_type_hash>
+            mBluetoothDevicesWithCsd GUARDED_BY(mLock);
+
     float mRs2UpperBound GUARDED_BY(mLock);
     std::unordered_map<audio_devices_t, float> mMelAttenuationDB GUARDED_BY(mLock);
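
The new mBluetoothDevicesWithCsd member caches the per-address Bluetooth categorization in an unordered_map keyed by an (address, audio_devices_t) pair, which is why the custom bt_device_type_hash above is needed: std::hash has no specialization for std::pair. A standalone sketch of the same structure, with a plain integer standing in for audio_devices_t and an illustrative hasher name.

// Standalone sketch of the (address, device type) -> csdCompatible cache added to
// SoundDoseManager, including the custom hasher an unordered_map needs for a std::pair key.
// DeviceType and BtDeviceKeyHash are stand-ins, not AOSP types.
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>
#include <utility>

using DeviceType = int32_t;         // stands in for audio_devices_t
using BtDeviceKey = std::pair<std::string, DeviceType>;

struct BtDeviceKeyHash {
    std::size_t operator()(const BtDeviceKey& key) const {
        // Same shape as bt_device_type_hash: combine the two sub-hashes with XOR.
        return std::hash<std::string>()(key.first) ^ std::hash<DeviceType>()(key.second);
    }
};

int main() {
    std::unordered_map<BtDeviceKey, bool, BtDeviceKeyHash> bluetoothDevicesWithCsd;

    // Categorize the BT device at this address/type as headphones (CSD compatible).
    bluetoothDevicesWithCsd[{"00:11:22:33:44:55", /*deviceType=*/128}] = true;

    const auto it = bluetoothDevicesWithCsd.find({"00:11:22:33:44:55", 128});
    const bool shouldCompute = (it != bluetoothDevicesWithCsd.end()) && it->second;
    std::cout << std::boolalpha << shouldCompute << "\n";   // prints true
    return 0;
}
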
 
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 9fab77d..7d0b3a7 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -39,10 +39,18 @@
                 (const std::shared_ptr<ISoundDose::IHalSoundDoseCallback>&), (override));
 };
 
+class MelReporterCallback : public IMelReporterCallback {
+public:
+    MOCK_METHOD(void, startMelComputationForDeviceId, (audio_port_handle_t), (override));
+    MOCK_METHOD(void, stopMelComputationForDeviceId, (audio_port_handle_t), (override));
+};
+
+
 class SoundDoseManagerTest : public ::testing::Test {
 protected:
     void SetUp() override {
-        mSoundDoseManager = sp<SoundDoseManager>::make();
+        mMelReporterCallback = sp<MelReporterCallback>::make();
+        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback);
         mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
 
         ON_CALL(*mHalSoundDose.get(), setOutputRs2UpperBound)
@@ -52,6 +60,7 @@
             });
     }
 
+    sp<MelReporterCallback> mMelReporterCallback;
     sp<SoundDoseManager> mSoundDoseManager;
     std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
 };
@@ -243,5 +252,53 @@
     EXPECT_TRUE(mSoundDoseManager->forceUseFrameworkMel());
 }
 
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStopsNonHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = false;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), stopMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, SetAudioDeviceCategoryStartsHeadphone) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->setAudioDeviceCategory(device1);
+}
+
+TEST_F(SoundDoseManagerTest, InitCachedAudioDevicesStartsOnlyActiveDevices) {
+    media::ISoundDose::AudioDeviceCategory device1;
+    media::ISoundDose::AudioDeviceCategory device2;
+    device1.address = "dev1";
+    device1.csdCompatible = true;
+    device1.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    device2.address = "dev2";
+    device2.csdCompatible = true;
+    device2.internalAudioType = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP;
+    const AudioDeviceTypeAddr dev1Adt{AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, device1.address};
+    std::vector<media::ISoundDose::AudioDeviceCategory> btDevices = {device1, device2};
+
+    // this will mark the device as active
+    mSoundDoseManager->mapAddressToDeviceId(dev1Adt, /*deviceId=*/1);
+    EXPECT_CALL(*mMelReporterCallback.get(), startMelComputationForDeviceId).Times(1);
+
+    mSoundDoseManager->initCachedAudioDeviceCategories(btDevices);
+}
+
+
 }  // namespace
 }  // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index febccac..1e57edd 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -102,9 +102,13 @@
     void setVolume(float volumeDb) { mCurVolumeDb = volumeDb; }
     float getVolume() const { return mCurVolumeDb; }
 
+    void setIsVoice(bool isVoice) { mIsVoice = isVoice; }
+    bool isVoice() const { return mIsVoice; }
+
 private:
     int mMuteCount = 0; /**< mute request counter */
     float mCurVolumeDb = NAN; /**< current volume in dB. */
+    bool mIsVoice = false; /**< true if this volume source is used for voice call volume */
 };
 /**
  * Note: volume activities shall be indexed by CurvesId if we want to allow multiple
@@ -162,7 +166,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     /**
      * @brief setStopTime set the stop time due to the client stoppage or a re routing of this
@@ -222,17 +227,25 @@
     {
         return mVolumeActivities[vs].decMuteCount();
     }
-    void setCurVolume(VolumeSource vs, float volumeDb)
+    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
     {
         // Even if not activity for this source registered, need to create anyway
         mVolumeActivities[vs].setVolume(volumeDb);
+        mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
     }
     float getCurVolume(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities) ?
                     mVolumeActivities.at(vs).getVolume() : NAN;
     }
-
+    VolumeSource getVoiceSource() {
+        for (const auto &iter : mVolumeActivities) {
+            if (iter.second.isVoice()) {
+                return iter.first;
+            }
+        }
+        return VOLUME_SOURCE_NONE;
+    }
     bool isStrategyActive(product_strategy_t ps, uint32_t inPastMs = 0, nsecs_t sysTime = 0) const
     {
         return mRoutingActivities.find(ps) != std::end(mRoutingActivities)?
@@ -381,7 +394,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
@@ -484,7 +498,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
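
The AudioOutputDescriptor change above tags the volume activity that carries the voice-call volume (setIsVoice()/isVoice()) so getVoiceSource() can find it again later, e.g. when SwAudioOutputDescriptor::setVolume mirrors a Bluetooth SCO volume onto the voice-call source. A standalone sketch of that tagging and lookup, with an int standing in for VolumeSource and a simple map in place of mVolumeActivities.

// Standalone sketch of tagging one volume activity as the "voice" source and looking it up
// again, mirroring setIsVoice()/getVoiceSource() from AudioOutputDescriptor.h. All types and
// names here are illustrative stand-ins.
#include <iostream>
#include <map>

using VolumeSource = int;
constexpr VolumeSource VOLUME_SOURCE_NONE = -1;

struct VolumeActivity {
    float volumeDb = 0.0f;
    bool isVoice = false;
};

class OutputDescriptorSketch {
public:
    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc) {
        VolumeActivity& activity = mVolumeActivities[vs];   // created on demand, as in the diff
        activity.volumeDb = volumeDb;
        activity.isVoice = isVoiceVolSrc;
    }
    VolumeSource getVoiceSource() const {
        for (const auto& [vs, activity] : mVolumeActivities) {
            if (activity.isVoice) {
                return vs;
            }
        }
        return VOLUME_SOURCE_NONE;
    }
private:
    std::map<VolumeSource, VolumeActivity> mVolumeActivities;
};

int main() {
    OutputDescriptorSketch desc;
    desc.setCurVolume(/*vs=*/3, /*volumeDb=*/-12.0f, /*isVoiceVolSrc=*/false);
    desc.setCurVolume(/*vs=*/7, /*volumeDb=*/-6.0f, /*isVoiceVolSrc=*/true);
    std::cout << desc.getVoiceSource() << "\n";             // prints 7
    return 0;
}
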
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8b23311..2f424b8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -163,7 +163,8 @@
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
                                       uint32_t delayMs,
-                                      bool force)
+                                      bool force,
+                                      bool isVoiceVolSrc)
 {
 
     if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
@@ -176,7 +177,7 @@
     // - the force flag is set
     if (volumeDb != getCurVolume(volumeSource) || force) {
         ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
-        setCurVolume(volumeSource, volumeDb);
+        setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
         return true;
     }
     return false;
@@ -510,11 +511,12 @@
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
-            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
         return false;
     }
     if (streams.empty()) {
@@ -560,6 +562,10 @@
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
         mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
+        VolumeSource callVolSrc = getVoiceSource();
+        if (callVolSrc != VOLUME_SOURCE_NONE) {
+            setCurVolume(callVolSrc, getCurVolume(vs), true);
+        }
     }
     for (const auto &stream : streams) {
         ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
@@ -788,10 +794,11 @@
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
index e214ae9..8c7a7de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -89,7 +89,12 @@
         if (aidlPort.ext.getTag() == AudioPortExt::mix) {
             auto mixPort = sp<IOProfile>::make("", AUDIO_PORT_ROLE_NONE);
             RETURN_STATUS_IF_ERROR(mixPort->readFromParcelable(fwPort));
-            sortAudioProfiles(mixPort->getAudioProfiles());
+            auto& profiles = mixPort->getAudioProfiles();
+            if (profiles.empty()) {
+                profiles.add(AudioProfile::createFullDynamic(gDynamicFormat));
+            } else {
+                sortAudioProfiles(mixPort->getAudioProfiles());
+            }
             mixPorts.add(mixPort);
             ports.emplace(aidlPort.id, mixPort);
         } else if (aidlPort.ext.getTag() == AudioPortExt::device) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5d56e7d..8717083 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -5912,22 +5912,26 @@
         }
     }
 
+    // The caller can have the audio config criteria ignored by either passing a null ptr or
+    // the AUDIO_CONFIG_INITIALIZER value.
+    // If an audio config is specified, current policy is to only allow spatialization for
+    // some positional channel masks and PCM format
+
+    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
+        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
+            return false;
+        }
+        if (!audio_is_linear_pcm(config->format)) {
+            return false;
+        }
+    }
+
     sp<IOProfile> profile =
             getSpatializerOutputProfile(config, devices);
     if (profile == nullptr) {
         return false;
     }
 
-    // The caller can have the audio config criteria ignored by either passing a null ptr or
-    // the AUDIO_CONFIG_INITIALIZER value.
-    // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks.
-
-    if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
-            return false;
-        }
-    }
     return true;
 }
 
@@ -7832,8 +7836,8 @@
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
-    outputDesc->setVolume(
-            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+    outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
+            deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
         float voiceVolume;
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 5d22ed4..15aced0 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,6 +25,7 @@
 #include <sys/time.h>
 #include <dlfcn.h>
 
+#include <android/content/pm/IPackageManagerNative.h>
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -215,6 +216,27 @@
 {
     delete interface;
 }
+
+namespace {
+int getTargetSdkForPackageName(std::string_view packageName) {
+    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+    int targetSdk = -1;
+    if (binder != nullptr) {
+        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (pm != nullptr) {
+            const auto status = pm->getTargetSdkVersionForPackage(
+                    String16{packageName.data(), packageName.size()}, &targetSdk);
+            ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
+            return status.isOk() ? targetSdk : -1;
+        }
+    }
+    return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
@@ -1926,10 +1948,14 @@
     checkOp();
     mOpCallback = new RecordAudioOpCallback(this);
     ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+    int flags = doesPackageTargetAtLeastU(
+            mAttributionSource.packageName.value_or("")) ?
+            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
     // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
     // since it controls the mic permission for legacy apps.
     mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
         mAttributionSource.packageName.value_or(""))),
+        flags,
         mOpCallback);
 }
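
The new helpers above gate AppOpsManager::WATCH_FOREGROUND_CHANGES on the recording app targeting Android U (API 34) or later; packages the package manager cannot resolve report -1 and keep the previous behavior (flags == 0). A minimal sketch of that gating, assuming a plain lookup table in place of IPackageManagerNative::getTargetSdkVersionForPackage() (the table contents, package names and flag value below are illustrative only):

#include <cstdio>
#include <map>
#include <string>

constexpr int kAndroidApiU = 34;                 // __ANDROID_API_U__
constexpr int kWatchForegroundChanges = 1 << 0;  // illustrative flag value only

// Stand-in for the package manager query; returns -1 when the package is unknown.
int getTargetSdkForPackage(const std::map<std::string, int>& packageDb,
                           const std::string& packageName) {
    auto it = packageDb.find(packageName);
    return it != packageDb.end() ? it->second : -1;
}

int main() {
    const std::map<std::string, int> packageDb{
            {"com.example.targets_u", 34},
            {"com.example.legacy", 30},
    };
    for (const std::string name : {"com.example.targets_u", "com.example.legacy", "com.unknown"}) {
        // Apps targeting U or later are watched with foreground-change updates;
        // everything else (including unknown packages) keeps flags == 0.
        const int flags = getTargetSdkForPackage(packageDb, name) >= kAndroidApiU
                ? kWatchForegroundChanges : 0;
        std::printf("%s -> flags=0x%x\n", name.c_str(), flags);
    }
    return 0;
}
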
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index d1351cf..9f52ddf 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -316,14 +316,24 @@
     for (auto& i : mListenerList) {
         if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
                 i->getListenerUid())) {
-            ALOGV("Skipping torch callback for system-only camera device %s",
-                    cameraId.c_str());
+            ALOGV("%s: Skipping torch callback for system-only camera device %s",
+                    __FUNCTION__, cameraId.c_str());
             continue;
         }
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
                 String16{cameraId});
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
+        // for the specific package that this listener belongs to.
+        std::vector<String8> remappedCameraIds =
+                findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
+        for (auto& remappedCameraId : remappedCameraIds) {
+            ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+                    String16(remappedCameraId));
+            i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                    __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+        }
     }
 }
 
@@ -745,6 +755,156 @@
     return Status::ok();
 }
 
+Status CameraService::remapCameraIds(const hardware::CameraIdRemapping&
+      cameraIdRemapping) {
+    if (!checkCallingPermission(sCameraInjectExternalCameraPermission)) {
+        const int pid = CameraThreadState::getCallingPid();
+        const int uid = CameraThreadState::getCallingUid();
+        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
+                __FUNCTION__, pid, uid);
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
+                "Permission Denial: no permission to configure camera id mapping");
+    }
+    TCameraIdRemapping cameraIdRemappingMap{};
+    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
+    if (!parseStatus.isOk()) {
+        return parseStatus;
+    }
+    remapCameraIds(cameraIdRemappingMap);
+    return Status::ok();
+}
+
+Status CameraService::parseCameraIdRemapping(
+        const hardware::CameraIdRemapping& cameraIdRemapping,
+        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
+    String16 packageName;
+    String8 cameraIdToReplace, updatedCameraId;
+    for (const auto& packageIdRemapping : cameraIdRemapping.packageIdRemappings) {
+        packageName = packageIdRemapping.packageName;
+        if (packageName == String16("")) {
+            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Package name cannot be empty");
+        }
+
+        if (packageIdRemapping.cameraIdsToReplace.size()
+            != packageIdRemapping.updatedCameraIds.size()) {
+            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
+                     String8(packageName).c_str());
+        }
+        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
+            cameraIdToReplace = String8(packageIdRemapping.cameraIdsToReplace[i]);
+            updatedCameraId = String8(packageIdRemapping.updatedCameraIds[i]);
+            if (cameraIdToReplace == String8("") || updatedCameraId == String8("")) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
+                        String8(packageName).c_str());
+            }
+            if (cameraIdToReplace == updatedCameraId) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
+                        " as updatedCameraId for %s",
+                        String8(packageName).c_str());
+            }
+            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
+        }
+    }
+    return Status::ok();
+}
+
+void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
+    // Acquire mServiceLock and prevent other clients from connecting
+    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
+            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
+
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    // This will disconnect all existing clients for camera Ids that are being
+    // remapped in cameraIdRemapping, but only if they were being used by an
+    // affected packageName.
+    std::vector<sp<BasicClient>> clientsToDisconnect;
+    std::vector<String8> cameraIdsToUpdate;
+    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
+        for (auto& [id0, id1] : injectionMap) {
+            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, String8(packageName).c_str(),
+                    id0.c_str(), id1.c_str());
+            auto clientDescriptor = mActiveClientManager.get(id0);
+            if (clientDescriptor != nullptr) {
+                sp<BasicClient> clientSp = clientDescriptor->getValue();
+                if (clientSp->getPackageName() == packageName) {
+                    // This camera ID is being used by the affected packageName.
+                    clientsToDisconnect.push_back(clientSp);
+                    cameraIdsToUpdate.push_back(id0);
+                }
+            }
+        }
+    }
+
+    // Update mCameraIdRemapping.
+    mCameraIdRemapping.clear();
+    mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
+
+    // Do not hold mServiceLock while disconnecting clients, but retain the condition
+    // blocking other clients from connecting in mServiceLockWrapper if held.
+    mServiceLock.unlock();
+
+    // Disconnect clients.
+    for (auto& clientSp : clientsToDisconnect) {
+        // We send up ERROR_CAMERA_DEVICE so that the app attempts to reconnect
+        // automatically.
+        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
+                CaptureResultExtras{});
+        // This also triggers the status updates
+        clientSp->disconnect();
+    }
+
+    mServiceLock.lock();
+}
+
+std::vector<String8> CameraService::findOriginalIdsForRemappedCameraId(
+    const String8& inputCameraId, int clientUid) {
+    String16 packageName = getPackageNameFromUid(clientUid);
+    std::vector<String8> cameraIds;
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
+        packageMapIter != mCameraIdRemapping.end()) {
+        for (auto& [id0, id1]: packageMapIter->second) {
+            if (id1 == inputCameraId) {
+                cameraIds.push_back(id0);
+            }
+        }
+    }
+    return cameraIds;
+}
+
+String8 CameraService::resolveCameraId(const String8& inputCameraId) {
+    return resolveCameraId(inputCameraId, String16(""));
+}
+
+String8 CameraService::resolveCameraId(
+    const String8& inputCameraId,
+    const String16& packageName) {
+    String16 packageNameVal = packageName;
+    if (packageName == String16("")) {
+        int clientUid = CameraThreadState::getCallingUid();
+        packageNameVal = getPackageNameFromUid(clientUid);
+    }
+    Mutex::Autolock lock(mCameraIdRemappingLock);
+    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
+        packageMapIter != mCameraIdRemapping.end()) {
+        ALOGI("%s: resolveCameraId: packageName found %s",
+                __FUNCTION__, String8(packageNameVal).c_str());
+        auto packageMap = packageMapIter->second;
+        if (auto replacementIdIter = packageMap.find(inputCameraId);
+            replacementIdIter != packageMap.end()) {
+            ALOGI("%s: resolveCameraId: inputId found %s, replacing with %s",
+                    __FUNCTION__, inputCameraId.c_str(),
+                    replacementIdIter->second.c_str());
+            return replacementIdIter->second;
+        }
+    }
+    return inputCameraId;
+}
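
The remapping state added above is a two-level map, packageName -> {cameraIdToReplace -> updatedCameraId}: resolveCameraId() walks it forward for incoming binder calls, and findOriginalIdsForRemappedCameraId() walks it backwards so status and torch callbacks can also be delivered under the ids the package originally asked for. A standalone sketch of both lookups, using std::string in place of String8/String16 (an assumption for illustration only):

#include <iostream>
#include <map>
#include <string>
#include <vector>

// packageName -> {cameraIdToReplace -> updatedCameraId}
using CameraIdRemapping = std::map<std::string, std::map<std::string, std::string>>;

// Forward lookup: which id should this package's request actually operate on?
std::string resolveCameraId(const CameraIdRemapping& remapping,
                            const std::string& packageName,
                            const std::string& inputCameraId) {
    if (auto pkgIt = remapping.find(packageName); pkgIt != remapping.end()) {
        if (auto idIt = pkgIt->second.find(inputCameraId); idIt != pkgIt->second.end()) {
            return idIt->second;
        }
    }
    return inputCameraId;  // no remapping configured for this package/id
}

// Reverse lookup: which original ids were remapped onto this (real) camera id?
std::vector<std::string> findOriginalIdsForRemappedCameraId(
        const CameraIdRemapping& remapping, const std::string& packageName,
        const std::string& cameraId) {
    std::vector<std::string> originalIds;
    if (auto pkgIt = remapping.find(packageName); pkgIt != remapping.end()) {
        for (const auto& [originalId, updatedId] : pkgIt->second) {
            if (updatedId == cameraId) originalIds.push_back(originalId);
        }
    }
    return originalIds;
}

int main() {
    const CameraIdRemapping remapping{{"com.example.app", {{"0", "1"}}}};
    std::cout << resolveCameraId(remapping, "com.example.app", "0") << "\n";  // 1
    std::cout << resolveCameraId(remapping, "com.other.app", "0") << "\n";    // 0
    for (const auto& id : findOriginalIdsForRemappedCameraId(remapping, "com.example.app", "1")) {
        std::cout << "callback also delivered for original id " << id << "\n";  // 0
    }
    return 0;
}
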
+
 Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
         CameraInfo* cameraInfo) {
     ATRACE_CALL();
@@ -815,9 +975,10 @@
     return String8(cameraIdIntToStrLocked(cameraIdInt).c_str());
 }
 
-Status CameraService::getCameraCharacteristics(const String16& cameraId,
+Status CameraService::getCameraCharacteristics(const String16& unresolvedCameraId,
         int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
+    String8 cameraId = resolveCameraId(String8(unresolvedCameraId));
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -837,8 +998,7 @@
 
     Status ret{};
 
-
-    std::string cameraIdStr = String8(cameraId).string();
+    std::string cameraIdStr = cameraId.string();
     bool overrideForPerfClass =
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraIdStr, targetSdkVersion);
@@ -1028,7 +1188,7 @@
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        bool forceSlowJpegMode, /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const String8& originalCameraId, /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1071,7 +1231,7 @@
         *client = new CameraDeviceClient(cameraService, tmp,
                 cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait);
+                overrideForPerfClass, overrideToPortrait, originalCameraId);
         ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
     }
     return Status::ok();
@@ -1163,7 +1323,7 @@
             internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, /*out*/ tmp)
+            /*forceSlowJpegMode*/false, id, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1700,7 +1860,7 @@
     ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
             clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, /*out*/client);
+            overrideToPortrait, forceSlowJpegMode, id, /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
@@ -1780,7 +1940,7 @@
 
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-        const String16& cameraId,
+        const String16& unresolvedCameraId,
         const String16& clientPackageName,
         const std::optional<String16>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
@@ -1790,7 +1950,7 @@
 
     ATRACE_CALL();
     Status ret = Status::ok();
-    String8 id = String8(cameraId);
+    String8 id = resolveCameraId(String8(unresolvedCameraId), clientPackageName);
     sp<CameraDeviceClient> client = nullptr;
     String16 clientPackageNameAdj = clientPackageName;
     int callingPid = CameraThreadState::getCallingPid();
@@ -1840,7 +2000,7 @@
             /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
             targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
-            /*out*/client);
+            String8(unresolvedCameraId), /*out*/client);
 
     if(!ret.isOk()) {
         logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
@@ -1909,7 +2069,7 @@
         int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
         const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode,
+        bool overrideToPortrait, bool forceSlowJpegMode, const String8& originalCameraId,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
@@ -2026,7 +2186,7 @@
                 clientFeatureId, cameraId, api1CameraId, facing, orientation,
                 clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode,
+                overrideToPortrait, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2281,7 +2441,8 @@
     return OK;
 }
 
-Status CameraService::turnOnTorchWithStrengthLevel(const String16& cameraId, int32_t torchStrength,
+Status CameraService::turnOnTorchWithStrengthLevel(const String16& unresolvedCameraId,
+        int32_t torchStrength,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2292,7 +2453,7 @@
                 "Torch client binder in null.");
     }
 
-    String8 id = String8(cameraId.string());
+    String8 id = resolveCameraId(String8(unresolvedCameraId));
     int uid = CameraThreadState::getCallingUid();
 
     if (shouldRejectSystemCameraConnection(id)) {
@@ -2352,7 +2513,7 @@
 
     {
         Mutex::Autolock al(mTorchUidMapMutex);
-        updateTorchUidMapLocked(cameraId, uid);
+        updateTorchUidMapLocked(String16(id), uid);
     }
     // Check if the current torch strength level is same as the new one.
     bool shouldSkipTorchStrengthUpdates = mCameraProviderManager->shouldSkipTorchStrengthUpdate(
@@ -2411,7 +2572,8 @@
     return Status::ok();
 }
 
-Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
+Status CameraService::setTorchMode(const String16& unresolvedCameraId,
+        bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2422,7 +2584,7 @@
                 "Torch client Binder is null");
     }
 
-    String8 id = String8(cameraId.string());
+    String8 id = resolveCameraId(String8(unresolvedCameraId));
     int uid = CameraThreadState::getCallingUid();
 
     if (shouldRejectSystemCameraConnection(id)) {
@@ -2482,7 +2644,7 @@
         // Update UID map - this is used in the torch status changed callbacks, so must be done
         // before setTorchMode
         Mutex::Autolock al(mTorchUidMapMutex);
-        updateTorchUidMapLocked(cameraId, uid);
+        updateTorchUidMapLocked(String16(id), uid);
     }
 
     status_t err = mFlashlight->setTorchMode(id, enabled);
@@ -2947,11 +3109,20 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const String16& cameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const String16& unresolvedCameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const String8 id = String8(cameraId);
+    String8 resolvedId;
+    if (apiVersion == API_VERSION_2) {
+        resolvedId = resolveCameraId(String8(unresolvedCameraId));
+    } else { // if (apiVersion == API_VERSION_1)
+        // We don't support remapping for API 1.
+        // TODO(b/286287541): Also support remapping for API 1.
+        resolvedId = String8(unresolvedCameraId);
+    }
+
+    const String8 id = resolvedId;
 
     ALOGV("%s: for camera ID = %s", __FUNCTION__, id.string());
 
@@ -3011,11 +3182,11 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const String16& cameraId,
+Status CameraService::isHiddenPhysicalCamera(const String16& unresolvedCameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const String8 id = String8(cameraId);
+    const String8 id = resolveCameraId(String8(unresolvedCameraId));
 
     ALOGV("%s: for camera ID = %s", __FUNCTION__, id.string());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(id.string());
@@ -4958,7 +5129,6 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const String8& cameraId, StatusInternal status) {
-
             if (status != StatusInternal::ENUMERATING) {
                 // Update torch status if it has a flash unit.
                 Mutex::Autolock al(mTorchStatusMutex);
@@ -4991,9 +5161,21 @@
                 auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
                         String16(cameraId));
                 listener->handleBinderStatus(ret,
-                        "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                         "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                         __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                         ret.exceptionCode());
+                // Also trigger the callbacks for cameras that were remapped to the current
+                // cameraId for the specific package that this listener belongs to.
+                std::vector<String8> remappedCameraIds =
+                        findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
+                for (auto& remappedCameraId : remappedCameraIds) {
+                    ret = listener->getListener()->onStatusChanged(
+                            mapToInterface(status), String16(remappedCameraId));
+                    listener->handleBinderStatus(ret,
+                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                            __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
+                            ret.exceptionCode());
+                }
             }
         });
 }
@@ -5204,6 +5386,8 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
+    } else if (args.size() >= 4 && args[0] == String16("remap-camera-id")) {
+        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == String16("help")) {
         printHelp(out);
         return OK;
@@ -5212,6 +5396,23 @@
     return BAD_VALUE;
 }
 
+status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
+    uid_t uid = IPCThreadState::self()->getCallingUid();
+    if (uid != AID_ROOT) {
+        dprintf(err, "Must be adb root\n");
+        return PERMISSION_DENIED;
+    }
+    if (args.size() != 4) {
+        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
+        return BAD_VALUE;
+    }
+    String16 packageName = args[1];
+    String8 cameraIdToReplace = String8(args[2]);
+    String8 cameraIdNew = String8(args[3]);
+    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
+    return OK;
+}
+
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     String16 packageName = args[1];
 
@@ -5839,6 +6040,7 @@
         "  clear-stream-use-case-override clear the stream use case override\n"
         "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
         "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
+        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
         "  help print this message\n");
 }
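
For manual testing, the new remap-camera-id shell command drives the same remapping path for a single package and id pair; something like adb root followed by adb shell cmd media.camera remap-camera-id com.example.app 0 1 (the media.camera service name and the example package/ids here are assumptions for illustration) would make camera id 0 resolve to id 1 for com.example.app.
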
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 3214d4c..65b11e7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -20,6 +20,7 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -61,6 +62,7 @@
 #include <utility>
 #include <unordered_map>
 #include <unordered_set>
+#include <vector>
 
 namespace android {
 
@@ -138,6 +140,9 @@
 
     /////////////////////////////////////////////////////////////////////
     // ICameraService
+    // IMPORTANT: All binder calls that deal with logicalCameraId should use
+    // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
+    // perform the operation on (in case of Id Remapping).
     virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
@@ -221,6 +226,9 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/);
 
+    virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
+        cameraIdRemapping);
+
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
                                    Parcel* reply, uint32_t flags);
@@ -916,7 +924,7 @@
             int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
             const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideToPortrait, bool forceSlowJpegMode, const String8& originalCameraId,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -943,6 +951,48 @@
     // Mutex guarding mCameraStates map
     mutable Mutex mCameraStatesLock;
 
+    /**
+     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdToUse}.
+     *
+     * This specifies that for packageName, for every binder operation targeting
+     * cameraIdToReplace, use newCameraIdToUse instead.
+     */
+    typedef std::map<String16, std::map<String8, String8>> TCameraIdRemapping;
+    TCameraIdRemapping mCameraIdRemapping{};
+    /** Mutex guarding mCameraIdRemapping. */
+    Mutex mCameraIdRemappingLock;
+
+    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
+    binder::Status parseCameraIdRemapping(
+            const hardware::CameraIdRemapping& cameraIdRemapping,
+            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
+
+    /**
+     * Resolve the (potentially remapped) camera Id to use for packageName.
+     *
+     * This returns the Camera Id to use in case inputCameraId was remapped to a
+     * different Id for the given packageName. Otherwise, it returns the inputCameraId.
+     */
+    String8 resolveCameraId(const String8& inputCameraId, const String16& packageName);
+    /**
+     * Resolve the (potentially remapped) camera Id to use.
+     *
+     * This returns the Camera Id to use in case inputCameraId was remapped to a
+     * different Id for the packageName of the client. Otherwise, it returns the inputCameraId.
+     */
+    String8 resolveCameraId(const String8& inputCameraId);
+
+    /**
+     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
+     */
+    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
+
+    /**
+     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
+     */
+    std::vector<String8> findOriginalIdsForRemappedCameraId(
+        const String8& inputCameraId, int clientUid);
+
     // Circular buffer for storing event logging for dumps
     RingBuffer<String8> mEventLog;
     Mutex mLogLock;
@@ -1322,6 +1372,9 @@
     // Set or clear the zoom override flag
     status_t handleSetZoomOverride(const Vector<String16>& args);
 
+    // Set Camera Id remapping using 'cmd'
+    status_t handleCameraIdRemapping(const Vector<String16>& args, int errFd);
+
     // Handle 'watch' command as passed through 'cmd'
     status_t handleWatchCommand(const Vector<String16> &args, int inFd, int outFd);
 
@@ -1367,14 +1420,15 @@
      */
     static String8 getFormattedCurrentTime();
 
-    static binder::Status makeClient(const sp<CameraService>& cameraService,
-            const sp<IInterface>& cameraCb, const String16& packageName,
-            bool systemNativeClient, const std::optional<String16>& featureId,
-            const String8& cameraId, int api1CameraId, int facing, int sensorOrientation,
-            int clientPid, uid_t clientUid, int servicePid,
+    static binder::Status makeClient(
+            const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
+            const String16& packageName, bool systemNativeClient,
+            const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
+            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
             bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
-            /*out*/sp<BasicClient>* client);
+            const String8& originalCameraId,
+            /*out*/ sp<BasicClient>* client);
 
     status_t checkCameraAccess(const String16& opPackageName);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 8348cd9..5b5892a 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -70,7 +70,9 @@
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
                 /*legacyClient*/ true),
-        mParameters(api1CameraId, cameraFacing)
+        mParameters(api1CameraId, cameraFacing),
+        mLatestRequestIds(kMaxRequestIds),
+        mLatestFailedRequestIds(kMaxRequestIds)
 {
     ATRACE_CALL();
 
@@ -1843,7 +1845,7 @@
                     (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT == errorCode)) {
                 Mutex::Autolock al(mLatestRequestMutex);
 
-                mLatestFailedRequestId = resultExtras.requestId;
+                mLatestFailedRequestIds.add(resultExtras.requestId);
                 mLatestRequestSignal.signal();
             }
             mCaptureSequencer->notifyError(errorCode, resultExtras);
@@ -2418,7 +2420,10 @@
 
 status_t Camera2Client::waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout) {
     Mutex::Autolock l(mLatestRequestMutex);
-    while ((mLatestRequestId != requestId) && (mLatestFailedRequestId != requestId)) {
+    while ((std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) ==
+            mLatestRequestIds.end()) &&
+           (std::find(mLatestFailedRequestIds.begin(), mLatestFailedRequestIds.end(), requestId) ==
+            mLatestFailedRequestIds.end())) {
         nsecs_t startTime = systemTime();
 
         auto res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -2427,13 +2432,14 @@
         timeout -= (systemTime() - startTime);
     }
 
-    return (mLatestRequestId == requestId) ? OK : DEAD_OBJECT;
+    return (std::find(mLatestRequestIds.begin(), mLatestRequestIds.end(), requestId) !=
+             mLatestRequestIds.end()) ? OK : DEAD_OBJECT;
 }
 
 void Camera2Client::notifyRequestId(int32_t requestId) {
     Mutex::Autolock al(mLatestRequestMutex);
 
-    mLatestRequestId = requestId;
+    mLatestRequestIds.add(requestId);
     mLatestRequestSignal.signal();
 }
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 6bdb644..a7ea823 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -22,11 +22,7 @@
 #include "common/Camera2ClientBase.h"
 #include "api1/client2/Parameters.h"
 #include "api1/client2/FrameProcessor.h"
-//#include "api1/client2/StreamingProcessor.h"
-//#include "api1/client2/JpegProcessor.h"
-//#include "api1/client2/ZslProcessor.h"
-//#include "api1/client2/CaptureSequencer.h"
-//#include "api1/client2/CallbackProcessor.h"
+#include <media/RingBuffer.h>
 
 namespace android {
 
@@ -263,8 +259,8 @@
 
     mutable Mutex mLatestRequestMutex;
     Condition mLatestRequestSignal;
-    int32_t mLatestRequestId = -1;
-    int32_t mLatestFailedRequestId = -1;
+    static constexpr size_t kMaxRequestIds = BufferQueueDefs::NUM_BUFFER_SLOTS;
+    RingBuffer<int32_t> mLatestRequestIds, mLatestFailedRequestIds;
     status_t waitUntilRequestIdApplied(int32_t requestId, nsecs_t timeout);
     status_t waitUntilCurrentRequestIdLocked();
 };
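
Tracking only the single most recent (or most recently failed) request id can miss a wakeup when several requests complete between checks, so the change above keeps the last kMaxRequestIds of each in a RingBuffer and tests membership instead of equality. A standalone sketch of that idea, with std::deque standing in for android::RingBuffer (an assumption for illustration):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <deque>
#include <iostream>

class RequestIdRing {
public:
    explicit RequestIdRing(size_t capacity) : mCapacity(capacity) {}
    void add(int32_t requestId) {
        if (mIds.size() == mCapacity) mIds.pop_front();  // drop the oldest entry
        mIds.push_back(requestId);
    }
    bool contains(int32_t requestId) const {
        return std::find(mIds.begin(), mIds.end(), requestId) != mIds.end();
    }
private:
    size_t mCapacity;
    std::deque<int32_t> mIds;
};

int main() {
    RequestIdRing applied(/*capacity=*/4);
    for (int32_t id : {1, 2, 3, 4, 5}) applied.add(id);
    // Id 1 has been evicted, id 5 is still tracked.
    std::cout << applied.contains(1) << " " << applied.contains(5) << "\n";  // prints "0 1"
    return 0;
}
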
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 38c615d..1720b55 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -98,7 +98,8 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait) :
+        bool overrideToPortrait,
+        const String8& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
@@ -106,8 +107,8 @@
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
-    mOverrideForPerfClass(overrideForPerfClass) {
-
+    mOverrideForPerfClass(overrideForPerfClass),
+    mOriginalCameraId(originalCameraId) {
     ATRACE_CALL();
     ALOGI("CameraDeviceClient %s: Opened", cameraId.string());
 }
@@ -322,7 +323,7 @@
 
         //The first capture settings should always match the logical camera id
         String8 logicalId(request.mPhysicalCameraSettings.begin()->id.c_str());
-        if (mDevice->getId() != logicalId) {
+        if (mDevice->getId() != logicalId && mOriginalCameraId != logicalId) {
             ALOGE("%s: Camera %s: Invalid camera request settings.", __FUNCTION__,
                     mCameraIdStr.string());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
@@ -437,6 +438,8 @@
 
         CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
         for (const auto& it : request.mPhysicalCameraSettings) {
+            std::string resolvedId =
+                    (mOriginalCameraId.string() == it.id) ? mDevice->getId().string() : it.id;
             if (it.settings.isEmpty()) {
                 ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
                         __FUNCTION__, mCameraIdStr.string());
@@ -447,7 +450,7 @@
             // Check whether the physical / logical stream has settings
             // consistent with the sensor pixel mode(s) it was configured with.
             // mCameraIdToStreamSet will only have ids that are high resolution
-            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(it.id);
+            const auto streamIdSetIt = mHighResolutionCameraIdToStreamIdSet.find(resolvedId);
             if (streamIdSetIt != mHighResolutionCameraIdToStreamIdSet.end()) {
                 std::list<int> streamIdsUsedInRequest = getIntersection(streamIdSetIt->second,
                         outputStreamIds);
@@ -455,14 +458,14 @@
                         !isSensorPixelModeConsistent(streamIdsUsedInRequest, it.settings)) {
                      ALOGE("%s: Camera %s: Request settings CONTROL_SENSOR_PIXEL_MODE not "
                             "consistent with configured streams. Rejecting request.",
-                            __FUNCTION__, it.id.c_str());
+                            __FUNCTION__, resolvedId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Request settings CONTROL_SENSOR_PIXEL_MODE are not consistent with "
                         "streams configured");
                 }
             }
 
-            String8 physicalId(it.id.c_str());
+            String8 physicalId(resolvedId.c_str());
             bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
                     mSupportedPhysicalRequestKeys.end();
@@ -471,7 +474,7 @@
                     mSupportedPhysicalRequestKeys.end();
             if (physicalId != mDevice->getId()) {
                 auto found = std::find(requestedPhysicalIds.begin(), requestedPhysicalIds.end(),
-                        it.id);
+                        resolvedId);
                 if (found == requestedPhysicalIds.end()) {
                     ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
                             __FUNCTION__, mCameraIdStr.string(), physicalId.string());
@@ -494,11 +497,11 @@
                         }
                     }
 
-                    physicalSettingsList.push_back({it.id, filteredParams,
+                    physicalSettingsList.push_back({resolvedId, filteredParams,
                             hasTestPatternModePhysicalKey, hasTestPatternDataPhysicalKey});
                 }
             } else {
-                physicalSettingsList.push_back({it.id, it.settings});
+                physicalSettingsList.push_back({resolvedId, it.settings});
             }
         }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 1533cf5..95563ee 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -191,7 +191,8 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait);
+            bool overrideToPortrait,
+            const String8& originalCameraId);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
@@ -368,6 +369,9 @@
     std::string mUserTag;
     // The last set video stabilization mode
     int mVideoStabilizationMode = -1;
+
+    // This only exists in case of camera ID Remapping.
+    String8 mOriginalCameraId;
 };
 
 }; // namespace android
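
When an id remapping is in effect, a client may keep addressing per-request (physical) settings by the id it originally opened, so CameraDeviceClient now carries that original id and folds it back onto the device id before the settings are matched against configured streams. A minimal standalone sketch of that substitution, with std::string standing in for the String8/std::string mix in the real code (names below are illustrative):

#include <iostream>
#include <string>
#include <vector>

std::string resolveSettingsId(const std::string& settingsId,
                              const std::string& originalCameraId,
                              const std::string& deviceId) {
    // Settings addressed to the original (pre-remap) id are treated as targeting
    // the device the client was actually connected to after remapping.
    return settingsId == originalCameraId ? deviceId : settingsId;
}

int main() {
    // Client opened "0", which the service remapped to device "1".
    const std::vector<std::string> settingsIds{"0", "2"};
    for (const auto& id : settingsIds) {
        std::cout << id << " -> " << resolveSettingsId(id, "0", "1") << "\n";
    }
    return 0;  // prints "0 -> 1" and "2 -> 2"
}
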
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 61c3298..71e49fd 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -3030,6 +3030,7 @@
         mNotifyPipelineDrain(false),
         mFrameNumber(0),
         mLatestRequestId(NAME_NOT_FOUND),
+        mLatestFailedRequestId(NAME_NOT_FOUND),
         mCurrentAfTriggerId(0),
         mCurrentPreCaptureTriggerId(0),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
@@ -3281,7 +3282,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mLatestRequestMutex);
     status_t res;
-    while (mLatestRequestId != requestId) {
+    while (mLatestRequestId != requestId && mLatestFailedRequestId != requestId) {
         nsecs_t startTime = systemTime();
 
         res = mLatestRequestSignal.waitRelative(mLatestRequestMutex, timeout);
@@ -4010,8 +4011,11 @@
                 sp<Camera3Device> parent = mParent.promote();
                 if (parent != nullptr) {
                     const String8& streamCameraId = outputStream->getPhysicalCameraId();
+                    // Also cover the case where a client sends a single logical camera request
+                    // that should apply to its physical output stream(s).
+                    bool singleRequest = captureRequest->mSettingsList.size() == 1;
                     for (const auto& settings : captureRequest->mSettingsList) {
-                        if ((streamCameraId.isEmpty() &&
+                        if (((streamCameraId.isEmpty() || singleRequest) &&
                                 parent->getId() == settings.cameraId.c_str()) ||
                                 streamCameraId == settings.cameraId.c_str()) {
                             outputStream->fireBufferRequestForFrameNumber(
@@ -4356,6 +4360,12 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
+            {
+                Mutex::Autolock al(mLatestRequestMutex);
+
+                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
+                mLatestRequestSignal.signal();
+            }
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
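
waitUntilRequestIdApplied() previously only woke up when a request id was applied, so a request that failed submission could leave the waiter blocked until its timeout; the new mLatestFailedRequestId plus the extra signal() above fix that. A standalone sketch of the pattern, with std::mutex/std::condition_variable standing in for Mutex/Condition (an assumption for illustration):

#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <iostream>
#include <mutex>
#include <thread>

struct RequestTracker {
    std::mutex lock;
    std::condition_variable signal;
    int32_t latestRequestId = -1;
    int32_t latestFailedRequestId = -1;

    // Returns true if the request was applied, false if it failed or timed out.
    bool waitUntilRequestIdApplied(int32_t requestId, std::chrono::milliseconds timeout) {
        std::unique_lock<std::mutex> l(lock);
        const bool woken = signal.wait_for(l, timeout, [&] {
            return latestRequestId == requestId || latestFailedRequestId == requestId;
        });
        return woken && latestRequestId == requestId;
    }

    void notifyApplied(int32_t requestId) {
        std::lock_guard<std::mutex> l(lock);
        latestRequestId = requestId;
        signal.notify_all();
    }

    void notifyFailed(int32_t requestId) {
        std::lock_guard<std::mutex> l(lock);
        latestFailedRequestId = requestId;
        signal.notify_all();
    }
};

int main() {
    RequestTracker tracker;
    std::thread worker([&] { tracker.notifyFailed(42); });
    // Without the failed-id check this wait would block for the full timeout.
    const bool applied = tracker.waitUntilRequestIdApplied(42, std::chrono::milliseconds(500));
    worker.join();
    std::cout << (applied ? "applied" : "failed or timed out") << "\n";  // failed or timed out
    return 0;
}
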
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index b1dd135..aa1d55a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1142,6 +1142,7 @@
         Condition          mLatestRequestSignal;
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
+        int32_t            mLatestFailedRequestId;
         CameraMetadata     mLatestRequest;
         std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;