Merge "AudioFlinger: Create Thread callback" into udc-qpr-dev-plus-aosp
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
index ab0a39c..e875c53 100644
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ b/camera/aidl/android/hardware/CameraIdRemapping.aidl
@@ -35,16 +35,16 @@
          * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
          * affected.
          */
-        String[] cameraIdToReplace;
+        List<String> cameraIdsToReplace;
         /**
-         *  Ordered list of updated Camera Ids, where updatedCameraId[i] corresponds to
-         *  the updated camera id for cameraIdToReplace[i].
+         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
+         *  the updated camera id for cameraIdsToReplace[i].
          */
-        String[] updatedCameraId;
+        List<String> updatedCameraIds;
     }
 
     /**
      * List of Camera Id remappings to perform.
      */
-    List<PackageIdRemapping> packageIdRemapping;
+    List<PackageIdRemapping> packageIdRemappings;
 }
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 51082d1..29afcee 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -67,6 +67,7 @@
 using hardware::hidl_vec;
 using hardware::fromHeap;
 using hardware::HidlMemory;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 using namespace hardware::cas::V1_0;
 using namespace hardware::cas::native::V1_0;
@@ -82,6 +83,11 @@
 // than making it non-blocking. Do not change this value.
 const static size_t kDequeueTimeoutNs = 0;
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 }  // namespace
 
 CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -148,6 +154,7 @@
       mCCodecCallback(callback),
       mFrameIndex(0u),
       mFirstValidFrameIndex(0u),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
       mIsSurfaceToDisplay(false),
       mHasPresentFenceTimes(false),
       mRenderingDepth(3u),
@@ -174,8 +181,7 @@
         Mutexed<BlockPools>::Locked pools(mBlockPools);
         pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
     }
-    std::string value = server_configurable_flags::GetServerConfigurableFlag(
-            "media_native", "ccodec_rendering_depth", "3");
+    std::string value = GetServerConfigurableFlag("media_native", "ccodec_rendering_depth", "3");
     android::base::ParseInt(value, &mRenderingDepth);
     mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
 }
@@ -996,7 +1002,7 @@
 
     int64_t mediaTimeUs = 0;
     (void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-    if (mIsSurfaceToDisplay) {
+    if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
         trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
         processRenderedFrames(qbo.frameTimestamps);
     } else {
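
The gating above is evaluated once per codec instance rather than per frame: the helper reads the device_config-backed "media_native" namespace (key "render_metrics_enabled", default "false"), so render metrics stay off until the flag is flipped server-side or locally (e.g. adb shell device_config put media_native render_metrics_enabled true). The same helper is repeated verbatim in the ACodec.cpp and MediaCodec.cpp hunks further down. A minimal standalone sketch of that construct-time caching pattern, using only the server_configurable_flags API already used above (the RenderMetricsGate class name is illustrative, not from the patch):

    #include <string>
    #include <server_configurable_flags/get_flags.h>

    class RenderMetricsGate {
    public:
        RenderMetricsGate()
              // Read once at construction; later flag changes only affect new instances.
            : mEnabled(server_configurable_flags::GetServerConfigurableFlag(
                      "media_native", "render_metrics_enabled", "false") == "true") {}
        bool enabled() const { return mEnabled; }
    private:
        const bool mEnabled;
    };
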
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 2d87aa9..41f5ae2 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -334,6 +334,7 @@
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
     std::deque<TrackedFrame> mTrackedFrames;
+    bool mAreRenderMetricsEnabled;
     bool mIsSurfaceToDisplay;
     bool mHasPresentFenceTimes;
 
diff --git a/media/janitors/avic_OWNERS b/media/janitors/avic_OWNERS
index eca9978..81aac3a 100644
--- a/media/janitors/avic_OWNERS
+++ b/media/janitors/avic_OWNERS
@@ -3,4 +3,3 @@
 arifdikici@google.com
 dichenzhang@google.com
 kyslov@google.com
-richardxie@google.com
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bcb4756..3d4e955 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -132,6 +132,7 @@
       mMediaClock(mediaClock),
       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
       mAudioFirstAnchorTimeMediaUs(-1),
+      mAudioAnchorTimeMediaUs(-1),
       mAnchorTimeMediaUs(-1),
       mAnchorNumFramesWritten(-1),
       mVideoLateByUs(0LL),
@@ -433,6 +434,7 @@
 // Called on renderer looper.
 void NuPlayer::Renderer::clearAnchorTime() {
     mMediaClock->clearAnchor();
+    mAudioAnchorTimeMediaUs = -1;
     mAnchorTimeMediaUs = -1;
     mAnchorNumFramesWritten = -1;
 }
@@ -1286,7 +1288,7 @@
     Mutex::Autolock autoLock(mLock);
     // TRICKY: vorbis decoder generates multiple frames with the same
     // timestamp, so only update on the first frame with a given timestamp
-    if (mediaTimeUs == mAnchorTimeMediaUs) {
+    if (mediaTimeUs == mAudioAnchorTimeMediaUs) {
         return;
     }
     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
@@ -1324,6 +1326,7 @@
         }
     }
     mAnchorNumFramesWritten = mNumFramesWritten;
+    mAudioAnchorTimeMediaUs = mediaTimeUs;
     mAnchorTimeMediaUs = mediaTimeUs;
 }
 
diff --git a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
index f0c0a35..574ad3d 100644
--- a/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/include/nuplayer/NuPlayerRenderer.h
@@ -179,6 +179,9 @@
     float mVideoFpsHint;
 
     int64_t mAudioFirstAnchorTimeMediaUs;
+    // previous audio anchor timestamp, in media time base.
+    int64_t mAudioAnchorTimeMediaUs;
+    // previous anchor timestamp (audio or video), in media time base.
     int64_t mAnchorTimeMediaUs;
     int64_t mAnchorNumFramesWritten;
     int64_t mVideoLateByUs;
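
The new member keeps a dedicated record of the last audio anchor, so the vorbis duplicate-timestamp early return no longer depends on mAnchorTimeMediaUs, which is shared state (it is also reset by clearAnchorTime(), as the .cpp hunk shows). A toy sketch of the updated bookkeeping, not part of the patch, with the two fields pulled into a plain struct and everything else simplified:

    #include <cstdint>

    struct AnchorState {
        int64_t mAudioAnchorTimeMediaUs = -1;  // last audio anchor (new field)
        int64_t mAnchorTimeMediaUs = -1;       // last anchor, audio or otherwise
    };

    // Returns true when this audio timestamp should move the anchor; duplicate
    // vorbis timestamps are filtered by comparing against the audio-only anchor.
    static bool shouldAnchor(AnchorState& s, int64_t mediaTimeUs) {
        if (mediaTimeUs == s.mAudioAnchorTimeMediaUs) {
            return false;
        }
        s.mAudioAnchorTimeMediaUs = mediaTimeUs;
        s.mAnchorTimeMediaUs = mediaTimeUs;
        return true;
    }
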
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1a32e61..2223f24 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -65,11 +65,14 @@
 #include "include/SharedMemoryBuffer.h"
 #include <media/stagefright/omx/OMXUtils.h>
 
+#include <server_configurable_flags/get_flags.h>
+
 namespace android {
 
 typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
 
 using hardware::media::omx::V1_0::Status;
+using server_configurable_flags::GetServerConfigurableFlag;
 
 enum {
     kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
@@ -82,6 +85,11 @@
 
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 // OMX errors are directly mapped into status_t range if
 // there is no corresponding MediaError status code.
 // Use the statusFromOMXError(int32_t omxError) function.
@@ -564,6 +572,9 @@
 ACodec::ACodec()
     : mSampleRate(0),
       mNodeGeneration(0),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
+      mIsWindowToDisplay(false),
+      mHasPresentFenceTimes(false),
       mUsingNativeWindow(false),
       mNativeWindowUsageBits(0),
       mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
@@ -6863,7 +6874,7 @@
 
         int64_t mediaTimeUs = -1;
         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
-        if (mCodec->mIsWindowToDisplay) {
+        if (mCodec->mAreRenderMetricsEnabled && mCodec->mIsWindowToDisplay) {
             mCodec->trackReleasedFrame(frameId, mediaTimeUs, timestampNs);
             mCodec->pollForRenderedFrames();
         } else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3b9a703..ea24126 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -93,6 +93,7 @@
 using aidl::android::media::IResourceManagerClient;
 using aidl::android::media::IResourceManagerService;
 using aidl::android::media::ClientInfoParcel;
+using server_configurable_flags::GetServerConfigurableFlag;
 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
 
@@ -210,6 +211,7 @@
 // Render metrics
 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
+static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
@@ -283,6 +285,11 @@
     return (err == NO_MEMORY);
 }
 
+static bool areRenderMetricsEnabled() {
+    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+    return v == "true";
+}
+
 static const int kMaxRetry = 2;
 static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
 static const int kNumBuffersAlign = 16;
@@ -1149,9 +1156,10 @@
       mHavePendingInputBuffers(false),
       mCpuBoostRequested(false),
       mIsSurfaceToDisplay(false),
+      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
       mVideoRenderQualityTracker(
               VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
-                      server_configurable_flags::GetServerConfigurableFlag)),
+                      GetServerConfigurableFlag)),
       mLatencyUnknown(0),
       mBytesEncoded(0),
       mEarliestEncodedPtsUs(INT64_MAX),
@@ -1291,6 +1299,7 @@
         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
         if (m.frameReleasedCount > 0) {
             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
+            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
@@ -6182,7 +6191,7 @@
 
         // If rendering to the screen, then schedule a time in the future to poll to see if this
         // frame was ever rendered to seed onFrameRendered callbacks.
-        if (mIsSurfaceToDisplay) {
+        if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
             if (mediaTimeUs != INT64_MIN) {
                 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
                              : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index 4f12a37..e920bd1 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -154,7 +154,7 @@
 }
 
 VideoRenderQualityTracker::Configuration::Configuration() {
-    enabled = true;
+    enabled = false;
 
     // Assume that the app is skipping frames because it's detected that the frame couldn't be
     // rendered in time.
@@ -455,6 +455,8 @@
     if (mMetrics.firstRenderTimeUs == 0) {
         mMetrics.firstRenderTimeUs = actualRenderTimeUs;
     }
+    // Capture the timestamp at which the last frame was rendered
+    mMetrics.lastRenderTimeUs = actualRenderTimeUs;
 
     mMetrics.frameRenderedCount++;
 
diff --git a/media/libstagefright/include/media/stagefright/ACodec.h b/media/libstagefright/include/media/stagefright/ACodec.h
index ec4a04c..f876bc6 100644
--- a/media/libstagefright/include/media/stagefright/ACodec.h
+++ b/media/libstagefright/include/media/stagefright/ACodec.h
@@ -258,10 +258,13 @@
     int32_t mNodeGeneration;
     sp<TAllocator> mAllocator[2];
 
-    bool mUsingNativeWindow;
-    sp<ANativeWindow> mNativeWindow;
+    std::deque<TrackedFrame> mTrackedFrames; // render information for buffers sent to a window
+    bool mAreRenderMetricsEnabled;
     bool mIsWindowToDisplay;
     bool mHasPresentFenceTimes;
+
+    bool mUsingNativeWindow;
+    sp<ANativeWindow> mNativeWindow;
     int mNativeWindowUsageBits;
     android_native_rect_t mLastNativeWindowCrop;
     int32_t mLastNativeWindowDataSpace;
@@ -276,7 +279,6 @@
     // format updates. This will equal to mOutputFormat until the first actual frame is received.
     sp<AMessage> mBaseOutputFormat;
 
-    std::deque<TrackedFrame> mTrackedFrames; // render information for buffers sent to a window
     std::vector<BufferInfo> mBuffers[2];
     bool mPortEOS[2];
     status_t mInputEOSResult;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index da15ed4..ceba7d7 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -575,6 +575,7 @@
     sp<ALooper> mCryptoLooper;
 
     bool mIsSurfaceToDisplay;
+    bool mAreRenderMetricsEnabled;
     PlaybackDurationAccumulator mPlaybackDurationAccumulator;
     VideoRenderQualityTracker mVideoRenderQualityTracker;
 
diff --git a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
index 82ba81c..a656e6e 100644
--- a/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
+++ b/media/libstagefright/include/media/stagefright/VideoRenderQualityTracker.h
@@ -38,6 +38,9 @@
     // The render time of the first video frame.
     int64_t firstRenderTimeUs;
 
+    // The render time of the last video frame.
+    int64_t lastRenderTimeUs;
+
     // The number of frames released to be rendered.
     int64_t frameReleasedCount;
 
diff --git a/media/module/codecs/m4v_h263/OWNERS b/media/module/codecs/m4v_h263/OWNERS
new file mode 100644
index 0000000..e537138
--- /dev/null
+++ b/media/module/codecs/m4v_h263/OWNERS
@@ -0,0 +1,4 @@
+# owners for frameworks/av/media/module/codecs/m4v_h263
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
+essick@google.com
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 1d50621..c81d9ea 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -54,7 +54,7 @@
 
     sp<PlayAudioOpCallback> mOpCallback;
     // called by PlayAudioOpCallback when OP_PLAY_AUDIO is updated in AppOp callback
-    void checkPlayAudioForUsage();
+    void checkPlayAudioForUsage(bool doBroadcast);
 
     wp<IAfThreadBase> mThread;
     std::atomic_bool mHasOpPlayAudio;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 257f9e4..297147e 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -610,7 +610,9 @@
 
 void OpPlayAudioMonitor::onFirstRef()
 {
-    checkPlayAudioForUsage();
+    // Do not broadcast the initial state: it is unnecessary and could deadlock,
+    // because this method can be called with the mThread->mLock already held.
+    checkPlayAudioForUsage(/*doBroadcast=*/false);
     if (mAttributionSource.packageName.has_value()) {
         mOpCallback = new PlayAudioOpCallback(this);
         mAppOpsManager.startWatchingMode(AppOpsManager::OP_PLAY_AUDIO,
@@ -625,7 +627,7 @@
 // Note this method is never called (and never to be) for audio server / patch record track
 // - not called from constructor due to check on UID,
 // - not called from PlayAudioOpCallback because the callback is not installed in this case
-void OpPlayAudioMonitor::checkPlayAudioForUsage()
+void OpPlayAudioMonitor::checkPlayAudioForUsage(bool doBroadcast)
 {
     const bool hasAppOps = mAttributionSource.packageName.has_value()
         && mAppOpsManager.checkAudioOpNoThrow(
@@ -635,11 +637,13 @@
     bool shouldChange = !hasAppOps;  // check if we need to update.
     if (mHasOpPlayAudio.compare_exchange_strong(shouldChange, hasAppOps)) {
         ALOGD("OpPlayAudio: track:%d usage:%d %smuted", mId, mUsage, hasAppOps ? "not " : "");
-        auto thread = mThread.promote();
-        if (thread != nullptr && thread->type() == IAfThreadBase::OFFLOAD) {
-            // Wake up Thread if offloaded, otherwise it may be several seconds for update.
-            Mutex::Autolock _l(thread->mutex());
-            thread->broadcast_l();
+        if (doBroadcast) {
+            auto thread = mThread.promote();
+            if (thread != nullptr && thread->type() == IAfThreadBase::OFFLOAD) {
+                // Wake up Thread if offloaded, otherwise it may take several seconds for the update.
+                Mutex::Autolock _l(thread->mutex());
+                thread->broadcast_l();
+            }
         }
     }
 }
@@ -657,7 +661,7 @@
     }
     sp<OpPlayAudioMonitor> monitor = mMonitor.promote();
     if (monitor != NULL) {
-        monitor->checkPlayAudioForUsage();
+        monitor->checkPlayAudioForUsage(/*doBroadcast=*/true);
     }
 }
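
The doBroadcast split exists because onFirstRef() can run while the caller already holds the thread lock; taking that lock again to broadcast would self-deadlock, and the initial state does not need a broadcast anyway. Only the later AppOps callback path, which runs without the lock, asks for the wake-up. A minimal standalone sketch of the pattern (class and method names illustrative), with std::mutex standing in for the AudioFlinger thread lock:

    #include <mutex>

    class Monitor {
    public:
        explicit Monitor(std::mutex& threadLock) : mThreadLock(threadLock) {}

        // May be invoked while the caller already holds threadLock.
        void onFirstRef() { check(/*doBroadcast=*/false); }

        // Invoked later from the AppOps callback, with threadLock not held.
        void onOpChanged() { check(/*doBroadcast=*/true); }

    private:
        void check(bool doBroadcast) {
            // ... re-evaluate the OP_PLAY_AUDIO state here ...
            if (doBroadcast) {
                std::lock_guard<std::mutex> lock(mThreadLock);  // safe: not held on this path
                // wake the offloaded thread so the mute change takes effect promptly
            }
        }
        std::mutex& mThreadLock;
    };
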
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index febccac..1e57edd 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -102,9 +102,13 @@
     void setVolume(float volumeDb) { mCurVolumeDb = volumeDb; }
     float getVolume() const { return mCurVolumeDb; }
 
+    void setIsVoice(bool isVoice) { mIsVoice = isVoice; }
+    bool isVoice() const { return mIsVoice; }
+
 private:
     int mMuteCount = 0; /**< mute request counter */
     float mCurVolumeDb = NAN; /**< current volume in dB. */
+    bool mIsVoice = false; /**< true if this volume source is used for voice call volume */
 };
 /**
  * Note: volume activities shall be indexed by CurvesId if we want to allow multiple
@@ -162,7 +166,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     /**
      * @brief setStopTime set the stop time due to the client stoppage or a re routing of this
@@ -222,17 +227,25 @@
     {
         return mVolumeActivities[vs].decMuteCount();
     }
-    void setCurVolume(VolumeSource vs, float volumeDb)
+    void setCurVolume(VolumeSource vs, float volumeDb, bool isVoiceVolSrc)
     {
         // Even if not activity for this source registered, need to create anyway
         mVolumeActivities[vs].setVolume(volumeDb);
+        mVolumeActivities[vs].setIsVoice(isVoiceVolSrc);
     }
     float getCurVolume(VolumeSource vs) const
     {
         return mVolumeActivities.find(vs) != std::end(mVolumeActivities) ?
                     mVolumeActivities.at(vs).getVolume() : NAN;
     }
-
+    VolumeSource getVoiceSource() {
+        for (const auto &iter : mVolumeActivities) {
+            if (iter.second.isVoice()) {
+                return iter.first;
+            }
+        }
+        return VOLUME_SOURCE_NONE;
+    }
     bool isStrategyActive(product_strategy_t ps, uint32_t inPastMs = 0, nsecs_t sysTime = 0) const
     {
         return mRoutingActivities.find(ps) != std::end(mRoutingActivities)?
@@ -381,7 +394,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& device,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
@@ -484,7 +498,8 @@
                            VolumeSource volumeSource, const StreamTypeVector &streams,
                            const DeviceTypeSet& deviceTypes,
                            uint32_t delayMs,
-                           bool force);
+                           bool force,
+                           bool isVoiceVolSrc = false);
 
     virtual void toAudioPortConfig(struct audio_port_config *dstConfig,
                            const struct audio_port_config *srcConfig = NULL) const;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 8b23311..2f424b8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -163,7 +163,8 @@
                                       const StreamTypeVector &/*streams*/,
                                       const DeviceTypeSet& deviceTypes,
                                       uint32_t delayMs,
-                                      bool force)
+                                      bool force,
+                                      bool isVoiceVolSrc)
 {
 
     if (!supportedDevices().containsDeviceAmongTypes(deviceTypes)) {
@@ -176,7 +177,7 @@
     // - the force flag is set
     if (volumeDb != getCurVolume(volumeSource) || force) {
         ALOGV("%s for volumeSrc %d, volume %f, delay %d", __func__, volumeSource, volumeDb, delayMs);
-        setCurVolume(volumeSource, volumeDb);
+        setCurVolume(volumeSource, volumeDb, isVoiceVolSrc);
         return true;
     }
     return false;
@@ -510,11 +511,12 @@
                                         VolumeSource vs, const StreamTypeVector &streamTypes,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
-            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force)) {
+            volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
         return false;
     }
     if (streams.empty()) {
@@ -560,6 +562,10 @@
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
         mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
+        VolumeSource callVolSrc = getVoiceSource();
+        if (callVolSrc != VOLUME_SOURCE_NONE) {
+            setCurVolume(callVolSrc, getCurVolume(vs), true);
+        }
     }
     for (const auto &stream : streams) {
         ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
@@ -788,10 +794,11 @@
                                         VolumeSource volumeSource, const StreamTypeVector &streams,
                                         const DeviceTypeSet& deviceTypes,
                                         uint32_t delayMs,
-                                        bool force)
+                                        bool force,
+                                        bool isVoiceVolSrc)
 {
     bool changed = AudioOutputDescriptor::setVolume(
-            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force);
+            volumeDb, muted, volumeSource, streams, deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (changed) {
       // TODO: use gain controller on source device if any to adjust volume
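
The isVoiceVolSrc plumbing tags one volume activity as the voice-call source; when the BLUETOOTH_SCO branch applies the volume to AUDIO_STREAM_VOICE_CALL, the cached value of that tagged source is refreshed as well, so the "volume unchanged" early return in setVolume() keeps matching what was actually applied. A compressed sketch of the lookup, not part of the patch, with plain std:: types in place of the policy classes:

    #include <map>

    using VolumeSource = int;
    constexpr VolumeSource VOLUME_SOURCE_NONE = -1;

    struct VolumeActivity {
        float volumeDb = 0.f;
        bool isVoice = false;   // set when setCurVolume(..., isVoiceVolSrc=true) ran
    };

    // Mirrors getVoiceSource(): return the volume source flagged as carrying the
    // voice-call volume, or VOLUME_SOURCE_NONE when no source is flagged.
    static VolumeSource getVoiceSource(const std::map<VolumeSource, VolumeActivity>& activities) {
        for (const auto& [vs, activity] : activities) {
            if (activity.isVoice) {
                return vs;
            }
        }
        return VOLUME_SOURCE_NONE;
    }
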
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index fe15cf4..44648de 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -7844,8 +7844,8 @@
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
-    outputDesc->setVolume(
-            volumeDb, muted, volumeSource, curves.getStreamTypes(), deviceTypes, delayMs, force);
+    outputDesc->setVolume(volumeDb, muted, volumeSource, curves.getStreamTypes(),
+            deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
         float voiceVolume;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index fbaa665..1b1662b 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -766,7 +766,7 @@
                 "Permission Denial: no permission to configure camera id mapping");
     }
     TCameraIdRemapping cameraIdRemappingMap{};
-    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, cameraIdRemappingMap);
+    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
     if (!parseStatus.isOk()) {
         return parseStatus;
     }
@@ -776,25 +776,37 @@
 
 Status CameraService::parseCameraIdRemapping(
         const hardware::CameraIdRemapping& cameraIdRemapping,
-        TCameraIdRemapping cameraIdRemappingMap) {
+        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
     String16 packageName;
     String8 cameraIdToReplace, updatedCameraId;
-    for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemapping) {
+    for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
         packageName = packageIdRemapping.packageName;
         if (packageName == String16("")) {
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Package name cannot be empty");
         }
-        if (packageIdRemapping.cameraIdToReplace.size()
-            != packageIdRemapping.updatedCameraId.size()) {
+
+        if (packageIdRemapping.cameraIdsToReplace.size()
+            != packageIdRemapping.updatedCameraIds.size()) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
                      String8(packageName).c_str());
         }
-        for(size_t i = 0; i < packageIdRemapping.cameraIdToReplace.size(); i++) {
-            cameraIdToReplace = String8(packageIdRemapping.cameraIdToReplace[i]);
-            updatedCameraId = String8(packageIdRemapping.updatedCameraId[i]);
-            cameraIdRemappingMap[packageName][cameraIdToReplace] = updatedCameraId;
+        for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
+            cameraIdToReplace = String8(packageIdRemapping.cameraIdsToReplace[i]);
+            updatedCameraId = String8(packageIdRemapping.updatedCameraIds[i]);
+            if (cameraIdToReplace == String8("") || updatedCameraId == String8("")) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
+                        String8(packageName).c_str());
+            }
+            if (cameraIdToReplace == updatedCameraId) {
+                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
+                        " as updatedCameraId for %s",
+                        String8(packageName).c_str());
+            }
+            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
         }
     }
     return Status::ok();
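
Besides switching the map to a true out-parameter, the loop above now rejects empty ids and identity mappings. A standalone restatement of those checks, with std::string/std::vector standing in for the binder and String8/String16 types (function name illustrative):

    #include <string>
    #include <vector>

    // Returns an empty string on success, otherwise a human-readable error,
    // mirroring the validation added to parseCameraIdRemapping().
    static std::string validateRemapEntry(const std::string& packageName,
                                          const std::vector<std::string>& cameraIdsToReplace,
                                          const std::vector<std::string>& updatedCameraIds) {
        if (packageName.empty()) {
            return "Package name cannot be empty";
        }
        if (cameraIdsToReplace.size() != updatedCameraIds.size()) {
            return "Mismatch in CameraId remapping list sizes for package " + packageName;
        }
        for (size_t i = 0; i < cameraIdsToReplace.size(); i++) {
            if (cameraIdsToReplace[i].empty() || updatedCameraIds[i].empty()) {
                return "Camera Id cannot be empty for package " + packageName;
            }
            if (cameraIdsToReplace[i] == updatedCameraIds[i]) {
                return "cameraIdToReplace cannot equal updatedCameraId for package " + packageName;
            }
        }
        return "";
    }
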
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index b9966b3..65b11e7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -964,8 +964,8 @@
 
     /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
     binder::Status parseCameraIdRemapping(
-        const hardware::CameraIdRemapping& cameraIdRemapping,
-        TCameraIdRemapping cameraIdRemappingMap);
+            const hardware::CameraIdRemapping& cameraIdRemapping,
+            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
 
     /**
      * Resolve the (potentially remapped) camera Id to use for packageName.