Merge "Effect AIDL: support output accumulate" into 24D1-dev am: 2bb094d270

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/26560044

Change-Id: I44618b4d8620cbb1cf19bb8f47dd6834210495c8
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 1f50570..1dc45c3 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -4,6 +4,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_hsum_permission"
+     is_exported: true
      description: "Camera access by headless system user"
      bug: "273539631"
 }
@@ -11,6 +12,7 @@
 flag {
      namespace: "camera_platform"
      name: "concert_mode"
+     is_exported: true
      description: "Introduces a new concert mode camera extension type"
      bug: "297083874"
 }
@@ -18,6 +20,7 @@
 flag {
      namespace: "camera_platform"
      name: "feature_combination_query"
+     is_exported: true
      description: "Query feature combination support and session specific characteristics"
      bug: "309627704"
 }
@@ -39,6 +42,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_manual_flash_strength_control"
+     is_exported: true
      description: "Flash brightness level control in manual flash mode"
      bug: "238348881"
 }
@@ -74,6 +78,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_ae_mode_low_light_boost"
+     is_exported: true
      description: "An AE mode that enables increased brightening in low light scenes"
      bug: "312803148"
 }
@@ -95,6 +100,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_extensions_characteristics_get"
+     is_exported: true
      description: "Enable get extension specific camera characteristics API"
      bug: "280649914"
 }
@@ -116,6 +122,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_device_setup"
+     is_exported: true
      description: "Create an intermediate Camera Device class for limited CameraDevice access."
      bug: "320741775"
 }
@@ -123,6 +130,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_privacy_allowlist"
+     is_exported: true
      description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
      bug: "282814430"
 }
@@ -130,6 +138,25 @@
 flag {
      namespace: "camera_platform"
      name: "extension_10_bit"
+     is_exported: true
      description: "Enables 10-bit support in the camera extensions."
      bug: "316375635"
 }
+
+flag {
+     namespace: "camera_platform"
+     name: "surface_leak_fix"
+     description: "Address Surface release leaks in CaptureRequest"
+     bug: "324071855"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "concert_mode_api"
+     description: "Covers the eyes free videography public facing API"
+     bug: "297083874"
+}
+
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index 3092091..a2b6a82 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -14,6 +14,7 @@
 
 flag {
   name: "dynamic_color_aspects"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for dynamic color aspect support"
   bug: "297914560"
@@ -21,6 +22,7 @@
 
 flag {
   name: "hlg_editing"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for HLG editing support"
   bug: "316397061"
@@ -28,6 +30,7 @@
 
 flag {
   name: "in_process_sw_audio_codec"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for in-process software audio codec API"
   bug: "297922713"
@@ -48,7 +51,15 @@
 }
 
 flag {
+  name: "native_capabilites"
+  namespace: "codec_fwk"
+  description: "Feature flag for native codec capabilities"
+  bug: "306023029"
+}
+
+flag {
   name: "null_output_surface"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for null output Surface API"
   bug: "297920102"
@@ -63,6 +74,7 @@
 
 flag {
   name: "region_of_interest"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flag for region of interest API"
   bug: "299191092"
@@ -74,3 +86,10 @@
   description: "Feature flag for region of interest support"
   bug: "325549730"
 }
+
+flag {
+  name: "teamfood"
+  namespace: "codec_fwk"
+  description: "Feature flag to track teamfood population"
+  bug: "328770262"
+}
diff --git a/media/aconfig/mediacodec_flags.aconfig b/media/aconfig/mediacodec_flags.aconfig
index 4d1e5ca..3cc9a1a 100644
--- a/media/aconfig/mediacodec_flags.aconfig
+++ b/media/aconfig/mediacodec_flags.aconfig
@@ -7,22 +7,23 @@
 # ******************************************************************
 
 flag {
-  name: "large_audio_frame"
+  name: "aidl_hal"
   namespace: "codec_fwk"
-  description: "Feature flags for large audio frame support"
-  bug: "297219557"
+  description: "Feature flags for enabling AIDL HAL handling"
+  bug: "251850069"
 }
 
 flag {
   name: "codec_importance"
+  is_exported: true
   namespace: "codec_fwk"
   description: "Feature flags for media codec importance"
   bug: "297929011"
 }
 
 flag {
-  name: "aidl_hal"
+  name: "large_audio_frame"
   namespace: "codec_fwk"
-  description: "Feature flags for enabling AIDL HAL handling"
-  bug: "251850069"
+  description: "Feature flags for large audio frame support"
+  bug: "297219557"
 }
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index cdbadc2..1df5727 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -60,6 +60,13 @@
 }
 
 flag {
+    name: "spatializer_upmix"
+    namespace: "media_audio"
+    description: "Enable spatializer upmix"
+    bug: "323985367"
+}
+
+flag {
     name: "stereo_spatialization"
     namespace: "media_audio"
     description: "Enable stereo channel mask for spatialization."
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index cfdf1ab..f0945de 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -23,6 +23,7 @@
 
 flag {
     name: "feature_spatial_audio_headtracking_low_latency"
+    is_exported: true
     namespace: "media_audio"
     description: "Define feature for low latency headtracking for SA"
     bug: "324291076"
@@ -30,6 +31,7 @@
 
 flag {
     name: "focus_exclusive_with_recording"
+    is_exported: true
     namespace: "media_audio"
     description:
         "Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
@@ -39,6 +41,7 @@
 
 flag {
     name: "foreground_audio_control"
+    is_exported: true
     namespace: "media_audio"
     description:
         "Audio focus gain requires FGS or delegation to "
@@ -49,6 +52,7 @@
 # TODO remove
 flag {
     name: "focus_freeze_test_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
  AudioManager audio focus test APIs:\
@@ -62,6 +66,7 @@
 
 flag {
     name: "loudness_configurator_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
 Enable the API for providing loudness metadata and CTA-2075 \
@@ -79,6 +84,7 @@
 
 flag {
     name: "sco_managed_by_audio"
+    is_exported: true
     namespace: "media_audio"
     description: "\
 Enable new implementation of headset profile device connection and\
@@ -88,6 +94,7 @@
 
 flag {
     name: "supported_device_types_api"
+    is_exported: true
     namespace: "media_audio"
     description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
     bug: "307537538"
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 72a1e6c..28b6c7f 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -24,6 +24,7 @@
 
 flag {
     name: "audio_mix_test_api"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
     bug: "309080867"
@@ -32,6 +33,7 @@
 
 flag {
     name: "audio_policy_update_mixing_rules_api"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
     bug: "293874525"
@@ -39,6 +41,7 @@
 
 flag {
     name: "enable_fade_manager_configuration"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable Fade Manager Configuration support to determine fade properties"
     bug: "307354764"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index efb643f..1620e1b 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -8,6 +8,7 @@
 
 flag {
     name: "virtual_ump"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable virtual UMP MIDI."
     bug: "291115176"
diff --git a/media/codec2/hal/aidl/Component.cpp b/media/codec2/hal/aidl/Component.cpp
index eb64a4a..87c9d87 100644
--- a/media/codec2/hal/aidl/Component.cpp
+++ b/media/codec2/hal/aidl/Component.cpp
@@ -487,7 +487,19 @@
     if (__builtin_available(android __ANDROID_API_T__, *)) {
         std::shared_ptr<C2Component::Listener> c2listener;
         if (mMultiAccessUnitIntf) {
-            mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
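+            // Fetch the default linear allocator and create a block pool bound to this
+            // component; the multi-access-unit helper uses it for its output allocations.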
+            std::shared_ptr<C2Allocator> allocator;
+            std::shared_ptr<C2BlockPool> linearPool;
+            std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+            if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+                ::android::C2PlatformAllocatorDesc desc;
+                desc.allocatorId = allocator->getId();
+                if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                    if (linearPool) {
+                        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                                mMultiAccessUnitIntf, linearPool);
+                    }
+                }
+            }
         }
         c2listener = mMultiAccessUnitHelper ?
                 std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 3a71520..8086ef2 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -96,18 +96,23 @@
     return (C2Component::kind_t)(mKind.value);
 }
 
-void MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
-        uint32_t &sampleRate_, uint32_t &channelCount_) const {
+bool MultiAccessUnitInterface::getDecoderSampleRateAndChannelCount(
+        uint32_t * const sampleRate_, uint32_t * const channelCount_) const {
+    if (sampleRate_ == nullptr || channelCount_ == nullptr) {
+        return false;
+    }
     if (mC2ComponentIntf) {
         C2StreamSampleRateInfo::output sampleRate;
         C2StreamChannelCountInfo::output channelCount;
         c2_status_t res = mC2ComponentIntf->query_vb(
                 {&sampleRate, &channelCount}, {}, C2_MAY_BLOCK, nullptr);
-        if (res == C2_OK) {
-            sampleRate_ = sampleRate.value;
-            channelCount_ = channelCount.value;
+        if (res == C2_OK && sampleRate.value > 0 && channelCount.value > 0) {
+            *sampleRate_ = sampleRate.value;
+            *channelCount_ = channelCount.value;
+            return true;
         }
     }
+    return false;
 }
 
 //C2MultiAccessUnitBuffer
@@ -121,12 +126,12 @@
 
 //MultiAccessUnitHelper
 MultiAccessUnitHelper::MultiAccessUnitHelper(
-        const std::shared_ptr<MultiAccessUnitInterface>& intf):
+        const std::shared_ptr<MultiAccessUnitInterface>& intf,
+        std::shared_ptr<C2BlockPool>& linearPool):
         mInit(false),
-        mInterface(intf) {
-    std::shared_ptr<C2AllocatorStore> store = GetCodec2PlatformAllocatorStore();
-    if(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mLinearAllocator) == C2_OK) {
-        mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, ++mBlockPoolId);
+        mInterface(intf),
+        mLinearPool(linearPool) {
+    if (mLinearPool) {
         mInit = true;
     }
 }
@@ -164,6 +169,7 @@
         std::list<std::unique_ptr<C2Work>> * const worklist) {
     if (worklist == nullptr) {
         LOG(ERROR) << "Provided null worklist for error()";
+        mFrameHolder.clear();
         return C2_OK;
     }
     std::unique_lock<std::mutex> l(mLock);
@@ -272,6 +278,7 @@
                 LOG(ERROR) << "ERROR: Work has Large frame info but has no linear blocks.";
                 return C2_CORRUPTED;
             }
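+            // Hold a reference to the incoming buffers for the lifetime of this frame's bookkeeping.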
+            frameInfo.mInputC2Ref = inBuffers;
             const std::vector<C2ConstLinearBlock>& multiAU =
                     inBuffers.front()->data().linearBlocks();
             std::shared_ptr<const C2AccessUnitInfos::input> auInfo =
@@ -320,26 +327,10 @@
             }
         }
         if (!processedWork->empty()) {
-            {
-                C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
-                if (mInterface->kind() == C2Component::KIND_DECODER) {
-                    uint32_t sampleRate = 0;
-                    uint32_t channelCount = 0;
-                    uint32_t frameSize = 0;
-                    mInterface->getDecoderSampleRateAndChannelCount(
-                            sampleRate, channelCount);
-                    if (sampleRate > 0 && channelCount > 0) {
-                        frameSize = channelCount * 2;
-                        multiAccessParams.maxSize =
-                                (multiAccessParams.maxSize / frameSize) * frameSize;
-                        multiAccessParams.thresholdSize =
-                                (multiAccessParams.thresholdSize / frameSize) * frameSize;
-                    }
-                }
-                frameInfo.mLargeFrameTuning = multiAccessParams;
-                std::lock_guard<std::mutex> l(mLock);
-                mFrameHolder.push_back(std::move(frameInfo));
-            }
+            C2LargeFrame::output multiAccessParams = mInterface->getLargeFrameParam();
+            frameInfo.mLargeFrameTuning = multiAccessParams;
+            std::lock_guard<std::mutex> l(mLock);
+            mFrameHolder.push_back(std::move(frameInfo));
         }
     }
     return C2_OK;
@@ -506,6 +497,20 @@
             frame.reset();
             return C2_OK;
         }
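+        // For audio decoders, round the large-frame max/threshold sizes down to a whole
+        // number of PCM frames (assuming 16-bit samples, i.e. channelCount * 2 bytes per frame).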
+        int64_t sampleTimeUs = 0;
+        uint32_t frameSize = 0;
+        uint32_t sampleRate = 0;
+        uint32_t channelCount = 0;
+        if (mInterface->getDecoderSampleRateAndChannelCount(&sampleRate, &channelCount)) {
+            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
+            frameSize = channelCount * 2;
+            if (mInterface->kind() == C2Component::KIND_DECODER) {
+                frame.mLargeFrameTuning.maxSize =
+                        (frame.mLargeFrameTuning.maxSize / frameSize) * frameSize;
+                frame.mLargeFrameTuning.thresholdSize =
+                        (frame.mLargeFrameTuning.thresholdSize / frameSize) * frameSize;
+            }
+        }
         c2_status_t c2ret = allocateWork(frame, true);
         if (c2ret != C2_OK) {
             return c2ret;
@@ -520,15 +525,7 @@
         outputFramedata.infoBuffers.insert(outputFramedata.infoBuffers.begin(),
                 (*worklet)->output.infoBuffers.begin(),
                 (*worklet)->output.infoBuffers.end());
-        int64_t sampleTimeUs = 0;
-        uint32_t frameSize = 0;
-        uint32_t sampleRate = 0;
-        uint32_t channelCount = 0;
-        mInterface->getDecoderSampleRateAndChannelCount(sampleRate, channelCount);
-        if (sampleRate > 0 && channelCount > 0) {
-            sampleTimeUs = (1000000u) / (sampleRate * channelCount * 2);
-            frameSize = channelCount * 2;
-        }
+
         LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
                 << " threshold " << frame.mLargeFrameTuning.thresholdSize;
         if ((*worklet)->output.buffers.size() > 0) {
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index a90ae56..bb4464c 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -44,8 +44,8 @@
     bool isValidField(const C2ParamField &field) const;
 
 protected:
-    void getDecoderSampleRateAndChannelCount(
-            uint32_t &sampleRate_, uint32_t &channelCount_) const;
+    bool getDecoderSampleRateAndChannelCount(
+            uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
     const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
     std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
     C2ComponentKindSetting mKind;
@@ -58,7 +58,8 @@
 struct MultiAccessUnitHelper {
 public:
     MultiAccessUnitHelper(
-            const std::shared_ptr<MultiAccessUnitInterface>& intf);
+            const std::shared_ptr<MultiAccessUnitInterface>& intf,
+            std::shared_ptr<C2BlockPool> &linearPool);
 
     virtual ~MultiAccessUnitHelper();
 
@@ -153,6 +154,11 @@
          */
         std::unique_ptr<C2Work> mLargeWork;
 
+        /*
+         * For holding a reference to the incoming buffer
+         */
+        std::vector<std::shared_ptr<C2Buffer>> mInputC2Ref;
+
         MultiAccessUnitInfo(C2WorkOrdinalStruct ordinal):inOrdinal(ordinal) {
 
         }
@@ -197,8 +203,6 @@
     C2BlockPool::local_id_t mBlockPoolId;
     // C2Blockpool for output buffer allocation
     std::shared_ptr<C2BlockPool> mLinearPool;
-    // Allocator for output buffer allocation
-    std::shared_ptr<C2Allocator> mLinearAllocator;
     // FrameIndex for the current outgoing work
     std::atomic_uint64_t mFrameIndex;
     // Mutex to protect mFrameHolder
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index e32e6ae..0259d90 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -570,7 +570,19 @@
 void Component::initListener(const sp<Component>& self) {
     std::shared_ptr<C2Component::Listener> c2listener;
     if (mMultiAccessUnitIntf) {
-        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
     }
     c2listener = mMultiAccessUnitHelper ?
             std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
index a34cef1..d1f0fb5 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/VtsHalMediaC2V1_0TargetMasterTest.cpp
@@ -84,14 +84,11 @@
 }
 
 TEST_P(Codec2MasterHalTest, MustUseAidlBeyond202404) {
-    static int sBoardFirstApiLevel = android::base::GetIntProperty("ro.board.first_api_level", 0);
-    static int sBoardApiLevel = android::base::GetIntProperty("ro.board.api_level", 0);
-    if (sBoardFirstApiLevel < 202404 && sBoardApiLevel < 202404) {
-        GTEST_SKIP() << "board first level less than 202404:"
-                     << " ro.board.first_api_level = " << sBoardFirstApiLevel
-                     << " ro.board.api_level = " << sBoardApiLevel;
+    static int sVendorApiLevel = android::base::GetIntProperty("ro.vendor.api_level", 0);
+    if (sVendorApiLevel < 202404) {
+        GTEST_SKIP() << "vendor api level less than 202404: " << sVendorApiLevel;
     }
-    ALOGV("HidlCodecAllowed Test");
+    ALOGV("MustUseAidlBeyond202404 Test");
 
     EXPECT_NE(mClient->getAidlBase(), nullptr) << "android.hardware.media.c2 MUST use AIDL "
                                                << "for chipsets launching at 202404 or above";
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 09e5709..d34d84e 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -583,7 +583,19 @@
 void Component::initListener(const sp<Component>& self) {
     std::shared_ptr<C2Component::Listener> c2listener;
     if (mMultiAccessUnitIntf) {
-        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
     }
     c2listener = mMultiAccessUnitHelper ?
             std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 0fe16e3..f78e827 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -610,7 +610,19 @@
 void Component::initListener(const sp<Component>& self) {
     std::shared_ptr<C2Component::Listener> c2listener;
     if (mMultiAccessUnitIntf) {
-        mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(mMultiAccessUnitIntf);
+        std::shared_ptr<C2Allocator> allocator;
+        std::shared_ptr<C2BlockPool> linearPool;
+        std::shared_ptr<C2AllocatorStore> store = ::android::GetCodec2PlatformAllocatorStore();
+        if (store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &allocator) == C2_OK) {
+            ::android::C2PlatformAllocatorDesc desc;
+            desc.allocatorId = allocator->getId();
+            if (C2_OK == CreateCodec2BlockPool(desc, mComponent, &linearPool)) {
+                if (linearPool) {
+                    mMultiAccessUnitHelper = std::make_shared<MultiAccessUnitHelper>(
+                            mMultiAccessUnitIntf, linearPool);
+                }
+            }
+        }
     }
     c2listener = mMultiAccessUnitHelper ?
             std::make_shared<MultiAccessUnitListener>(self, mMultiAccessUnitHelper) :
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 18c2468..4de2347 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -45,6 +45,7 @@
 
     static_libs: [
         "libSurfaceFlingerProperties",
+        "aconfig_mediacodec_flags_c_lib",
         "android.media.codec-aconfig-cc",
     ],
 
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 40656ff..f58dc65 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -552,8 +552,7 @@
     }
 
     ssize_t result = -1;
-    ssize_t codecDataOffset = 0;
-    size_t inBufferOffset = 0;
+    size_t srcOffset = offset;
     size_t outBufferSize = 0;
     uint32_t cryptoInfoIdx = 0;
     int32_t heapSeqNum = getHeapSeqNum(memory);
@@ -565,18 +564,20 @@
     for (int i = 0; i < bufferInfos->value.size(); i++) {
         if (bufferInfos->value[i].mSize > 0) {
             std::unique_ptr<CodecCryptoInfo> info = std::move(cryptoInfos->value[cryptoInfoIdx++]);
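+            // Window the source buffer onto this access unit; decrypt() is then called
+            // with an offset of 0.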
+            src.offset = srcOffset;
+            src.size = bufferInfos->value[i].mSize;
             result = mCrypto->decrypt(
                     (uint8_t*)info->mKey,
                     (uint8_t*)info->mIv,
                     info->mMode,
                     info->mPattern,
                     src,
-                    inBufferOffset,
+                    0,
                     info->mSubSamples,
                     info->mNumSubSamples,
                     dst,
                     errorDetailMsg);
-            inBufferOffset += bufferInfos->value[i].mSize;
+            srcOffset += bufferInfos->value[i].mSize;
             if (result < 0) {
                 ALOGI("[%s] attachEncryptedBuffers: decrypt failed: result = %zd",
                         mName, result);
@@ -599,7 +600,7 @@
         wView.setOffset(0);
     }
     std::shared_ptr<C2Buffer> c2Buffer{C2Buffer::CreateLinearBuffer(
-            block->share(codecDataOffset, outBufferSize - codecDataOffset, C2Fence{}))};
+            block->share(0, outBufferSize, C2Fence{}))};
     if (!buffer->copy(c2Buffer)) {
         ALOGI("[%s] attachEncryptedBuffers: buffer copy failed", mName);
         return -ENOSYS;
@@ -980,8 +981,7 @@
     }
     // size of cryptoInfo and accessUnitInfo should be the same?
     ssize_t result = -1;
-    ssize_t codecDataOffset = 0;
-    size_t inBufferOffset = 0;
+    size_t srcOffset = 0;
     size_t outBufferSize = 0;
     uint32_t cryptoInfoIdx = 0;
     {
@@ -994,6 +994,7 @@
         encryptedBuffer->getMappedBlock(&mappedBlock);
         hardware::drm::V1_0::SharedBuffer source;
         encryptedBuffer->fillSourceBuffer(&source);
+        srcOffset = source.offset;
         for (int i = 0 ; i < bufferInfos->value.size(); i++) {
             if (bufferInfos->value[i].mSize > 0) {
                 std::unique_ptr<CodecCryptoInfo> info =
@@ -1004,18 +1005,20 @@
                     // no data so we only populate the bufferInfo
                     result = 0;
                 } else {
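+                    // Point the source window at this access unit before decrypting.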
+                    source.offset = srcOffset;
+                    source.size = bufferInfos->value[i].mSize;
                     result = mCrypto->decrypt(
                             (uint8_t*)info->mKey,
                             (uint8_t*)info->mIv,
                             info->mMode,
                             info->mPattern,
                             source,
-                            inBufferOffset,
+                            buffer->offset(),
                             info->mSubSamples,
                             info->mNumSubSamples,
                             destination,
                             errorDetailMsg);
-                    inBufferOffset += bufferInfos->value[i].mSize;
+                    srcOffset += bufferInfos->value[i].mSize;
                     if (result < 0) {
                         ALOGI("[%s] decrypt failed: result=%zd", mName, result);
                         return result;
@@ -1028,7 +1031,7 @@
                 }
             }
         }
-        buffer->setRange(codecDataOffset, outBufferSize - codecDataOffset);
+        buffer->setRange(0, outBufferSize);
     }
     return queueInputBufferInternal(buffer, block, bufferSize);
 }
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 8dce789..37a7a4f 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -20,6 +20,7 @@
 
 #include <strings.h>
 
+#include <com_android_media_codec_flags.h>
 #include <android_media_codec.h>
 
 #include <C2Component.h>
@@ -755,7 +756,8 @@
                 addSupportedColorFormats(
                         intf, caps.get(), trait, mediaType, it->second);
 
-                if (android::media::codec::provider_->large_audio_frame_finish()) {
+                if (com::android::media::codec::flags::provider_->large_audio_frame()
+                        && android::media::codec::provider_->large_audio_frame_finish()) {
                     // Adding feature-multiple-frames when C2LargeFrame param is present
                     if (trait.domain == C2Component::DOMAIN_AUDIO) {
                         std::vector<std::shared_ptr<C2ParamDescriptor>> params;
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index e780f4f..cd7679c 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -278,3 +278,9 @@
         mDataQueue->eraseMemory();
     }
 }
+
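+// Erase part of the empty region after the write pointer so a free-running DSP
+// cannot replay stale data after the stream stops.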
+void AudioEndpoint::eraseEmptyDataMemory(int32_t numFrames) {
+    if (mDataQueue != nullptr) {
+        mDataQueue->eraseEmptyMemory(numFrames);
+    }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index b117572..7e97c6a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -107,6 +107,8 @@
      */
     void eraseDataMemory();
 
+    void eraseEmptyDataMemory(int32_t numFrames);
+
     void freeDataQueue() { mDataQueue.reset(); }
 
     void dump() const;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 7648e25..b2e93f0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -575,10 +575,20 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    // For playback, sleep until all the audio data has played.
+    // Then clear the buffer to prevent noise.
+    prepareBuffersForStop();
+
     mClockModel.stop(AudioClock::getNanoseconds());
     setState(AAUDIO_STREAM_STATE_STOPPING);
     mAtomicInternalTimestamp.clear();
 
+#if 0
+    // Simulate very slow CPU, force race condition where the
+    // DSP keeps playing after we stop writing.
+    AudioClock::sleepForNanos(800 * AAUDIO_NANOS_PER_MILLISECOND);
+#endif
+
     result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index a5981b1..20d55f9 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,6 +123,8 @@
 
     virtual void prepareBuffersForStart() {}
 
+    virtual void prepareBuffersForStop() {}
+
     virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
 
     virtual void onFlushFromServer() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5d4c3d4..0427777 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,8 @@
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <algorithm>
+
 #include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
@@ -108,6 +110,61 @@
     mAudioEndpoint->eraseDataMemory();
 }
 
+void AudioStreamInternalPlay::prepareBuffersForStop() {
+    // If this is a shared stream and the FIFO is being read by the mixer then
+    // we don't have to worry about the DSP reading past the valid data. We can skip all this.
+    if (!mAudioEndpoint->isFreeRunning()) {
+        return;
+    }
+    // Sleep until the DSP has read all of the data written.
+    int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+    if (validFramesInBuffer >= 0) {
+        int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
+
+        // Prevent stale data from being played if the DSP is still running.
+        // Erase some of the FIFO memory in front of the DSP read cursor.
+        // Subtract one burst so we do not accidentally erase data that the DSP might be using.
+        int64_t framesToErase = std::max((int64_t) 0,
+                                         emptyFramesInBuffer - getFramesPerBurst());
+        mAudioEndpoint->eraseEmptyDataMemory(framesToErase);
+
+        // Sleep until we are confident the DSP has consumed all of the valid data.
+        // Sleep for one extra burst as a safety margin because the IsochronousClockModel
+        // is not perfectly accurate.
+        int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+        int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
+        int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
+        // Prevent sleeping for too long.
+        durationAllConsumed = std::min(200 * AAUDIO_NANOS_PER_MILLISECOND, durationAllConsumed);
+        AudioClock::sleepForNanos(durationAllConsumed);
+    }
+
+    // Erase all of the memory in case the DSP keeps going and wraps around.
+    mAudioEndpoint->eraseDataMemory();
+
+    // Wait for the last buffer to reach the DAC.
+    // This is because the expected behavior of stop() is that all data written to the stream
+    // should be played before the hardware actually shuts down.
+    // This is different than pause(), where we just end as soon as possible.
+    // This can be important when, for example, playing car navigation and
+    // you want the user to hear the complete instruction.
+    if (mAtomicInternalTimestamp.isValid()) {
+        // Use timestamps to calculate the latency between the DSP reading
+        // a frame and when it reaches the DAC.
+        // This code assumes that timestamps are accurate.
+        Timestamp timestamp = mAtomicInternalTimestamp.read();
+        int64_t dacPosition = timestamp.getPosition();
+        int64_t hardwareReadTime = mClockModel.convertPositionToTime(dacPosition);
+        int64_t hardwareLatencyNanos = timestamp.getNanoseconds() - hardwareReadTime;
+        ALOGD("%s() hardwareLatencyNanos = %lld", __func__,
+              (long long) hardwareLatencyNanos);
+        // Prevent sleeping for too long.
+        hardwareLatencyNanos = std::min(30 * AAUDIO_NANOS_PER_MILLISECOND,
+                                        hardwareLatencyNanos);
+        AudioClock::sleepForNanos(hardwareLatencyNanos);
+    }
+}
+
 void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
@@ -353,20 +410,26 @@
         // Call application using the AAudio callback interface.
         callbackResult = maybeCallDataCallback(mCallbackBuffer.get(), mCallbackFrames);
 
-        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            // Write audio data to stream. This is a BLOCKING WRITE!
-            result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
-            if ((result != mCallbackFrames)) {
-                if (result >= 0) {
-                    // Only wrote some of the frames requested. The stream can be disconnected
-                    // or timed out.
-                    processCommands();
-                    result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
-                }
-                maybeCallErrorCallback(result);
-                break;
+        // Write audio data to stream. This is a BLOCKING WRITE!
+        // Write data regardless of the callbackResult because we assume the data
+        // is valid even when the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+        // Imagine a callback that is playing a large sound in memory.
+        // When it gets to the end of the sound it can partially fill
+        // the last buffer with the end of the sound, then zero pad the buffer, then return STOP.
+        // If the callback has no valid data then it should zero-fill the entire buffer.
+        result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
+        if ((result != mCallbackFrames)) {
+            if (result >= 0) {
+                // Only wrote some of the frames requested. The stream can be disconnected
+                // or timed out.
+                processCommands();
+                result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
             }
-        } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+            maybeCallErrorCallback(result);
+            break;
+        }
+
+        if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
             result = systemStopInternal();
             break;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index b51b5d0..4e14f18 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -66,6 +66,8 @@
 
     void prepareBuffersForStart() override;
 
+    void prepareBuffersForStop() override;
+
     void advanceClientToMatchServerPosition(int32_t serverMargin) override;
 
     void onFlushFromServer() override;
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 5c11882..f3e3bbd 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -150,7 +150,7 @@
 
     getEmptyRoomAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
+    // Write data in one or two parts.
     int partIndex = 0;
     while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
         fifo_frames_t framesToWrite = framesLeft;
@@ -192,3 +192,29 @@
         memset(getStorage(), 0, (size_t) numBytes);
     }
 }
+
+fifo_frames_t FifoBuffer::eraseEmptyMemory(fifo_frames_t numFrames) {
+    WrappingBuffer wrappingBuffer;
+    fifo_frames_t framesLeft = numFrames;
+
+    getEmptyRoomAvailable(&wrappingBuffer);
+
+    // Erase data in one or two parts.
+    int partIndex = 0;
+    while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
+        fifo_frames_t framesToWrite = framesLeft;
+        fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
+        if (framesAvailable > 0) {
+            if (framesToWrite > framesAvailable) {
+                framesToWrite = framesAvailable;
+            }
+            int32_t numBytes = convertFramesToBytes(framesToWrite);
+            memset(wrappingBuffer.data[partIndex], 0, numBytes);
+            framesLeft -= framesToWrite;
+        } else {
+            break;
+        }
+        partIndex++;
+    }
+    return numFrames - framesLeft; // number erased
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 7b0aca1..860ccad 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -115,6 +115,13 @@
      */
     void eraseMemory();
 
+    /**
+     * Clear some memory after the write pointer.
+     * This can be used to prevent the reader from accidentally reading stale data
+     * in case it is reading asynchronously.
+     */
+    fifo_frames_t eraseEmptyMemory(fifo_frames_t numFrames);
+
 protected:
 
     virtual uint8_t *getStorage() const = 0;
diff --git a/media/libaudioclient/include/media/EffectClientAsyncProxy.h b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
new file mode 100644
index 0000000..e7d6d80
--- /dev/null
+++ b/media/libaudioclient/include/media/EffectClientAsyncProxy.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/media/BnEffectClient.h>
+#include <audio_utils/CommandThread.h>
+
+namespace android::media {
+
+class EffectClientAsyncProxy : public IEffectClient {
+public:
+
+    /**
+     * Call this factory method to interpose a worker thread when a binder
+     * callback interface is invoked in-proc.
+     */
+    static sp<IEffectClient> makeIfNeeded(const sp<IEffectClient>& effectClient) {
+        if (isLocalBinder(effectClient)) {
+            return sp<EffectClientAsyncProxy>::make(effectClient);
+        }
+        return effectClient;
+    }
+
+    explicit EffectClientAsyncProxy(const sp<IEffectClient>& effectClient)
+        : mEffectClient(effectClient) {}
+
+    ::android::IBinder* onAsBinder() override {
+        return nullptr;
+    }
+
+    ::android::binder::Status controlStatusChanged(bool controlGranted) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->controlStatusChanged(controlGranted);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status enableStatusChanged(bool enabled) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->enableStatusChanged(enabled);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status commandExecuted(
+            int32_t cmdCode, const ::std::vector<uint8_t>& cmdData,
+            const ::std::vector<uint8_t>& replyData) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->commandExecuted(cmdCode, cmdData, replyData);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    ::android::binder::Status framesProcessed(int32_t frames) override {
+        getThread().add(__func__, [=, effectClient = mEffectClient]() {
+            effectClient->framesProcessed(frames);
+        });
+        return ::android::binder::Status::fromStatusT(::android::NO_ERROR);
+    }
+
+    /**
+     * Returns true if the binder interface is local (in-proc).
+     *
+     * Move to a binder helper class?
+     */
+    static bool isLocalBinder(const sp<IInterface>& interface) {
+        const auto b = IInterface::asBinder(interface);
+        return b && b->localBinder();
+    }
+
+private:
+    const sp<IEffectClient> mEffectClient;
+
+    /**
+     * Returns the per-interface-descriptor CommandThread for in-proc binder transactions.
+     *
+     * Note: Remote RPC transactions to a given binder (kernel) node enter that node's
+     * async_todo list, which serializes all async operations to that binder node.
+     * Each transaction on the async_todo list must complete before the next one
+     * starts, even though there may be available threads in the process threadpool.
+     *
+     * For local transactions, we order all async requests entering
+     * the CommandThread.  We do not maintain a threadpool, though a future implementation
+     * could use a shared ThreadPool.
+     *
+     * By using a static here, all in-proc binder interfaces made async with
+     * EffectClientAsyncProxy will get the same CommandThread.
+     *
+     * @return CommandThread to use.
+     */
+    static audio_utils::CommandThread& getThread() {
+        [[clang::no_destroy]] static audio_utils::CommandThread commandThread;
+        return commandThread;
+    }
+};  // class EffectClientAsyncProxy
+
+}  // namespace android::media
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index d440ef8..9a8d96b 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -208,6 +208,10 @@
         if (desc.common.flags.volume == Flags::Volume::NONE) {
             common.flags.volume = Flags::Volume::NONE;
         }
+        // set to AUXILIARY if any sub-effect is of AUXILIARY type
+        if (desc.common.flags.type == Flags::Type::AUXILIARY) {
+            common.flags.type = Flags::Type::AUXILIARY;
+        }
     }
 
     // copy type UUID from any of sub-effects, all sub-effects should have same type
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index 5d9886c..f60d616 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -114,10 +114,11 @@
     std::stringstream ss;
     ss << "\t\tHaptic setting:\n";
     ss << "\t\t- tracks intensity map:\n";
-    for (const auto&[id, intensity] : param.id2Intensity) {
-        ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+    for (const auto&[id, hapticScale] : param.id2HapticScale) {
+        ss << "\t\t\t- id=" << id << ", hapticLevel=" << (int) hapticScale.getLevel()
+           << ", adaptiveScaleFactor=" << hapticScale.getAdaptiveScaleFactor();
     }
-    ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+    ss << "\t\t- max scale level: " << (int) param.maxHapticScale.getLevel() << '\n';
     ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
     return ss.str();
 }
@@ -145,7 +146,7 @@
     memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
     context->param.hapticChannelCount = 0;
     context->param.audioChannelCount = 0;
-    context->param.maxHapticIntensity = os::HapticLevel::MUTE;
+    context->param.maxHapticScale = os::HapticScale::mute();
 
     context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
     context->param.bpfQ = 1.0f;
@@ -312,22 +313,25 @@
                                  void *value) {
     switch (param) {
     case HG_PARAM_HAPTIC_INTENSITY: {
-        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+        if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
             return -EINVAL;
         }
-        int id = *(int *) value;
-        os::HapticLevel hapticIntensity =
-                static_cast<os::HapticLevel>(*((int *) value + 1));
-        ALOGD("Setting haptic intensity as %d", static_cast<int>(hapticIntensity));
-        if (hapticIntensity == os::HapticLevel::MUTE) {
-            context->param.id2Intensity.erase(id);
+        const int id = *(int *) value;
+        const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
+        const float adaptiveScaleFactor = (*((float *) value + 2));
+        const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
+        ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
+              static_cast<int>(hapticLevel), adaptiveScaleFactor);
+        if (hapticScale.isScaleMute()) {
+            context->param.id2HapticScale.erase(id);
         } else {
-            context->param.id2Intensity.emplace(id, hapticIntensity);
+            context->param.id2HapticScale.emplace(id, hapticScale);
         }
-        context->param.maxHapticIntensity = hapticIntensity;
-        for (const auto&[id, intensity] : context->param.id2Intensity) {
-            context->param.maxHapticIntensity = std::max(
-                    context->param.maxHapticIntensity, intensity);
+        context->param.maxHapticScale = hapticScale;
+        for (const auto&[id, scale] : context->param.id2HapticScale) {
+            if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
+                context->param.maxHapticScale = scale;
+            }
         }
         break;
     }
@@ -479,7 +483,7 @@
         return -ENODATA;
     }
 
-    if (context->param.maxHapticIntensity == os::HapticLevel::MUTE) {
+    if (context->param.maxHapticScale.isScaleMute()) {
         // Haptic channels are muted, no need to generate haptic data.
         return 0;
     }
@@ -506,7 +510,7 @@
             context->processingChain, context->inputBuffer.data(),
             context->outputBuffer.data(), inBuffer->frameCount);
         os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
-                            { /*level=*/context->param.maxHapticIntensity},
+                            context->param.maxHapticScale,
                             context->param.maxHapticAmplitude);
 
     // For haptic data, the haptic playback thread will copy the data from effect input buffer,
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index f122c0a..dbfc5ea 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -48,9 +48,9 @@
     uint32_t audioChannelCount;
     uint32_t hapticChannelCount;
 
-    // A map from track id to haptic intensity.
-    std::map<int, os::HapticLevel> id2Intensity;
-    os::HapticLevel maxHapticIntensity; // max intensity will be used to scale haptic data.
+    // A map from track id to haptic scale.
+    std::map<int, os::HapticScale> id2HapticScale;
+    os::HapticScale maxHapticScale; // max haptic scale will be used to scale haptic data.
     float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
 
     float resonantFrequency;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e79e55b..e4f3b83 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3395,9 +3395,6 @@
     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
         return BAD_VALUE;
     }
-    if (cryptoInfos == nullptr || cryptoInfos->value.empty()) {
-        return BAD_VALUE;
-    }
     status_t err = OK;
     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
     msg->setSize("index", index);
@@ -3405,8 +3402,12 @@
         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
     msg->setObject("memory", memory);
     msg->setSize("offset", offset);
-    msg->setSize("ssize", size);
-    msg->setObject("cryptoInfos", cryptoInfos);
+    if (cryptoInfos != nullptr) {
+        msg->setSize("ssize", size);
+        msg->setObject("cryptoInfos", cryptoInfos);
+    } else {
+        msg->setSize("size", size);
+    }
     msg->setObject("accessUnitInfo", bufferInfos);
     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
         return err;
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index 5dd8423..dd6da15 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -1,13 +1,4 @@
 {
-  "postsubmit": [
-    // writerTest fails about 5 out of 66
-    // { "name": "writerTest" },
-
-    { "name": "HEVCUtilsUnitTest" },
-    { "name": "ExtractorFactoryTest" }
-
-  ],
-
   "presubmit-large": [
     {
       "name": "CtsMediaMiscTestCases",
@@ -92,8 +83,16 @@
     }
   ],
   "postsubmit": [
+    // writerTest fails about 5 out of 66
+    // { "name": "writerTest" },
     {
        "name": "BatteryChecker_test"
+    },
+    {
+        "name": "ExtractorFactoryTest"
+    },
+    {
+        "name": "HEVCUtilsUnitTest"
     }
   ]
 }
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 959f43e..458ac9c 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -626,6 +626,10 @@
             // ACodec is waiting for all buffers to be returned, do NOT
             // submit any more buffers to the codec.
             bufferSource->onOmxIdle();
+        } else if (param == OMX_StateExecuting) {
+            // Initiating transition from Idle -> Executing
+            // Start submitting buffers to codec.
+            bufferSource->onOmxExecuting();
         } else if (param == OMX_StateLoaded) {
             // Initiating transition from Idle/Executing -> Loaded
             // Buffers are about to be freed.
@@ -2404,13 +2408,6 @@
             asString(event), event, arg1String, arg1, arg2String, arg2);
     const sp<IOMXBufferSource> bufferSource(getBufferSource());
 
-    if (bufferSource != NULL
-            && event == OMX_EventCmdComplete
-            && arg1 == OMX_CommandStateSet
-            && arg2 == OMX_StateExecuting) {
-        bufferSource->onOmxExecuting();
-    }
-
     // allow configuration if we return to the loaded state
     if (event == OMX_EventCmdComplete
             && arg1 == OMX_CommandStateSet
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index c4f2808..deef31d 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+//#define LOG_NDEBUG 0
 #define LOG_TAG "ServiceUtilities"
 
 #include <audio_utils/clock.h>
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index b270813..819f2d6 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -31,6 +31,7 @@
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <media/AudioEffect.h>
+#include <media/EffectClientAsyncProxy.h>
 #include <media/ShmemCompat.h>
 #include <media/TypeConverter.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -1545,13 +1546,16 @@
         return INVALID_OPERATION;
     }
 
-    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
     effect_param_t *param = (effect_param_t*) request.data();
     param->psize = sizeof(int32_t);
-    param->vsize = sizeof(int32_t) * 2;
+    param->vsize = sizeof(int32_t) * 2 + sizeof(float);
     *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
-    *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = static_cast<int32_t>(hapticScale.getLevel());
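+    // Value layout after the parameter id: { int32_t trackId, int32_t hapticLevel,
+    // float adaptiveScaleFactor }, matching the HG_PARAM_HAPTIC_INTENSITY handler.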
+    int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
+    hapticScalePtr[0] = id;
+    hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
+    float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
+    *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1726,7 +1730,8 @@
                                          const sp<media::IEffectClient>& effectClient,
                                          int32_t priority, bool notifyFramesProcessed)
     : BnEffect(),
-    mEffect(effect), mEffectClient(effectClient), mClient(client), mCblk(NULL),
+    mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
+    mClient(client), mCblk(nullptr),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
     mNotifyFramesProcessed(notifyFramesProcessed)
 {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index d1a09a4..f1be3976 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2927,7 +2927,6 @@
 
             // Set haptic intensity for effect
             if (chain != nullptr) {
-                // TODO(b/324559333): Add adaptive haptics scaling support for the HapticGenerator.
                 chain->setHapticScale_l(track->id(), hapticScale);
             }
         }
@@ -3509,7 +3508,7 @@
             char *endptr;
             unsigned long ul = strtoul(value, &endptr, 0);
             if (*endptr == '\0' && ul != 0) {
-                ALOGD("Silence is golden");
+                ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
                 // The setprop command will not allow a property to be changed after
                 // the first time it is set, so we don't have to worry about un-muting.
                 setMasterMute_l(true);
@@ -7895,6 +7894,11 @@
     if (mSupportedLatencyModes.empty()) {
         return;
     }
+    // Do not update the HAL latency mode if no track is active
+    if (mActiveTracks.isEmpty()) {
+        return;
+    }
+
     audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
     if (mSupportedLatencyModes.size() == 1) {
         // If the HAL only support one latency mode currently, confirm the choice
diff --git a/services/audioflinger/afutils/NBAIO_Tee.cpp b/services/audioflinger/afutils/NBAIO_Tee.cpp
index 86fb128..cdc8e95 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.cpp
+++ b/services/audioflinger/afutils/NBAIO_Tee.cpp
@@ -514,6 +514,12 @@
     return NO_ERROR; // return full path
 }
 
+/* static */
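+// Singleton accessor; [[clang::no_destroy]] prevents the static registry from being
+// destroyed at process exit.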
+NBAIO_Tee::RunningTees& NBAIO_Tee::getRunningTees() {
+    [[clang::no_destroy]] static RunningTees runningTees;
+    return runningTees;
+}
+
 } // namespace android
 
 #endif // TEE_SINK
diff --git a/services/audioflinger/afutils/NBAIO_Tee.h b/services/audioflinger/afutils/NBAIO_Tee.h
index a5c544e..5ab1949 100644
--- a/services/audioflinger/afutils/NBAIO_Tee.h
+++ b/services/audioflinger/afutils/NBAIO_Tee.h
@@ -310,10 +310,7 @@
     };
 
     // singleton
-    static RunningTees &getRunningTees() {
-        static RunningTees runningTees;
-        return runningTees;
-    }
+    static RunningTees& getRunningTees();
 
     // The NBAIO TeeImpl may have lifetime longer than NBAIO_Tee if
     // RunningTees::dump() is being called simultaneous to ~NBAIO_Tee().
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index a2ebb8d..cf1a771 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -46,6 +46,9 @@
           "include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
         }
       ]
+    },
+    {
+      "name": "spatializer_tests"
     }
   ]
 }
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..44f84b9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -235,7 +235,8 @@
                                                   &deviceType,
                                                   String8(mDevice->address().c_str()),
                                                   source,
-                                                  flags);
+                                                  static_cast<audio_input_flags_t>(
+                                                          flags & mProfile->getFlags()));
     LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
                         "%s openInput returned device %08x when given device %08x",
                         __FUNCTION__, mDevice->type(), deviceType);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index d1819fd..3430f4b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -30,7 +30,7 @@
 #include <AudioOutputDescriptor.h>
 #include <android_media_audiopolicy.h>
 
-namespace audio_flags = android::media::audiopolicy;
+namespace audiopolicy_flags = android::media::audiopolicy;
 
 namespace android {
 namespace {
@@ -193,7 +193,7 @@
                     mix.mDeviceType, mix.mDeviceAddress.c_str());
             return BAD_VALUE;
         }
-        if (audio_flags::audio_mix_ownership()) {
+        if (audiopolicy_flags::audio_mix_ownership()) {
             if (mix.mToken == registeredMix->mToken) {
                 ALOGE("registerMix(): same mix already registered - skipping");
                 return BAD_VALUE;
@@ -221,7 +221,7 @@
 {
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
-        if (audio_flags::audio_mix_ownership()) {
+        if (audiopolicy_flags::audio_mix_ownership()) {
             if (mix.mToken == registeredMix->mToken) {
                 ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
                       mix.mDeviceType, mix.mDeviceAddress.c_str());
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 3bebb11..154172a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3825,7 +3825,6 @@
 status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes)
 {
     ALOGV("unregisterPolicyMixes() num mixes %zu", mixes.size());
-    status_t endResult = NO_ERROR;
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
@@ -3838,7 +3837,6 @@
                         AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX);
                 if (rSubmixModule == 0) {
                     res = INVALID_OPERATION;
-                    endResult = INVALID_OPERATION;
                     continue;
                 }
             }
@@ -3847,20 +3845,25 @@
 
             if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
                 res = INVALID_OPERATION;
-                endResult = INVALID_OPERATION;
                 continue;
             }
 
-            for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+            for (auto device: {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
                 if (getDeviceConnectionState(device, address.c_str()) ==
-                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
-                    res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                      address.c_str(), "remote-submix",
-                                                      AUDIO_FORMAT_DEFAULT);
-                    if (res != OK) {
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+                    status_t currentRes =
+                            setDeviceConnectionStateInt(device,
+                                                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                        address.c_str(),
+                                                        "remote-submix",
+                                                        AUDIO_FORMAT_DEFAULT);
+                    if (!audio_flags::audio_mix_ownership()) {
+                        res = currentRes;
+                    }
+                    if (currentRes != OK) {
                         ALOGE("Error making RemoteSubmix device unavailable for mix "
                               "with type %d, address %s", device, address.c_str());
-                        endResult = INVALID_OPERATION;
+                        res = INVALID_OPERATION;
                     }
                 }
             }
@@ -3870,24 +3873,16 @@
         } else if ((mix.mRouteFlags & MIX_ROUTE_FLAG_RENDER) == MIX_ROUTE_FLAG_RENDER) {
             if (mPolicyMixes.unregisterMix(mix) != NO_ERROR) {
                 res = INVALID_OPERATION;
-                endResult = INVALID_OPERATION;
                 continue;
             } else {
                 checkOutputs = true;
             }
         }
     }
-    if (audio_flags::audio_mix_ownership()) {
-        res = endResult;
-        if (res == NO_ERROR && checkOutputs) {
-            checkForDeviceAndOutputChanges();
-            updateCallAndOutputRouting();
-        }
-    } else {
-        if (res == NO_ERROR && checkOutputs) {
-            checkForDeviceAndOutputChanges();
-            updateCallAndOutputRouting();
-        }
+
+    if (res == NO_ERROR && checkOutputs) {
+        checkForDeviceAndOutputChanges();
+        updateCallAndOutputRouting();
     }
     return res;
 }
@@ -3907,7 +3902,7 @@
         _aidl_return.back().mToken = policyMix->mToken;
     }
 
-    ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return->size());
+    ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return.size());
     return OK;
 }
 
@@ -7280,7 +7275,6 @@
     DeviceVector devices;
     for (const auto &productStrategy : mEngine->getOrderedProductStrategies()) {
         StreamTypeVector streams = mEngine->getStreamTypesForProductStrategy(productStrategy);
-        auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
         auto hasStreamActive = [&](auto stream) {
             return hasStream(streams, stream) && isStreamActive(stream, 0);
         };
@@ -7305,6 +7299,7 @@
                 mOutputs.isStrategyActiveOnSameModule(productStrategy, outputDesc))) {
             // Retrieval of devices for voice DL is done on primary output profile, cannot
             // check the route (would force modifying configuration file for this profile)
+            auto attr = mEngine->getAllAttributesForProductStrategy(productStrategy).front();
             devices = mEngine->getOutputDevicesForAttributes(attr, nullptr, fromCache);
             break;
         }
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index cddbf39..05b0ddb 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -107,3 +107,9 @@
         "framework-permission-aidl-cpp",
     ],
 }
+
+cc_library_headers {
+    name: "libaudiopolicyservice_headers",
+    host_supported: true,
+    export_include_dirs: ["."],
+}
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index 16f3a9a..233a484 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -1096,7 +1096,7 @@
 }
 
 void Spatializer::checkSensorsState_l() {
-    audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+    mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
     const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
     bool supportsLowLatencyMode;
     if (com::android::media::audio::dsa_over_bt_le_audio()) {
@@ -1117,7 +1117,7 @@
                     && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                     && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (supportsLowLatencyMode) {
-                    requestedLatencyMode = selectHeadtrackingConnectionMode_l();
+                    mRequestedLatencyMode = selectHeadtrackingConnectionMode_l();
                 }
                 if (mEngine != nullptr) {
                     setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
@@ -1139,9 +1139,9 @@
     }
     if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
         const status_t status =
-                AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+                AudioSystem::setRequestedLatencyMode(mOutput, mRequestedLatencyMode);
         ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
-              mOutput, toString(requestedLatencyMode).c_str(), status);
+              mOutput, toString(mRequestedLatencyMode).c_str(), status);
     }
 }
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 355df18..c5f159c 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
+#include <media/MediaMetricsItem.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -153,6 +154,34 @@
         return mLevel;
     }
 
+    /** For test only */
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            getSupportedHeadtrackingConnectionModes() const {
+        return mSupportedHeadtrackingConnectionModes;
+    }
+
+    /** For test only */
+    media::audio::common::HeadTracking::ConnectionMode getHeadtrackingConnectionMode() const {
+        return mHeadtrackingConnectionMode;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getSupportedLatencyModes() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mSupportedLatencyModes;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getOrderedLowLatencyModes() const {
+        return mOrderedLowLatencyModes;
+    }
+
+    /** For test only */
+    audio_latency_mode_t getRequestedLatencyMode() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mRequestedLatencyMode;
+    }
+
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
@@ -164,6 +193,12 @@
     /** Returns the output stream the spatializer is attached to. */
     audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
 
+    /** For test only */
+    void setOutput(audio_io_handle_t output) {
+        audio_utils::lock_guard lock(mMutex);
+        mOutput = output;
+    }
+
     void updateActiveTracks(size_t numActiveTracks);
 
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -188,6 +223,10 @@
     // NO_INIT: Spatializer creation failed.
     static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
 
+    /** Made public for test only */
+    void onSupportedLatencyModesChangedMsg(
+            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -200,8 +239,6 @@
 
     void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
     void onActualModeChangeMsg(media::HeadTrackingMode mode);
-    void onSupportedLatencyModesChangedMsg(
-            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
     static constexpr int kMaxEffectParamValues = 10;
     /**
@@ -484,9 +521,11 @@
     std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
-    /** List of supported headtracking connection modes reported by the spatializer.
+
+    /** List of supported head tracking connection modes reported by the spatializer.
      * If the list is empty, the spatializer does not support any optional connection
      * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
+     * This is set in the factory constructor and can be accessed without mutex.
      */
     std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
             mSupportedHeadtrackingConnectionModes;
@@ -504,6 +543,9 @@
     std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
+
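+    /** Latest latency mode requested on the output by checkSensorsState_l(); exposed for tests. */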
+    audio_latency_mode_t mRequestedLatencyMode GUARDED_BY(mMutex) = AUDIO_LATENCY_MODE_FREE;
+
     /** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
     static const std::map<std::string, audio_latency_mode_t> sStringToLatencyModeMap;
     static const std::vector<const char*> sHeadPoseKeys;
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 34bd3b4..21ded60 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -108,3 +108,40 @@
     test_suites: ["device-tests"],
 
 }
+
+cc_test {
+    name: "spatializer_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaudiopolicyservice_dependencies",
+    ],
+
+    require_root: true,
+
+    shared_libs: [
+        "libaudioclient",
+        "libaudiofoundation",
+        "libcutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libaudiopolicyservice",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudiopolicyservice_headers",
+        "libmediametrics_headers",
+    ],
+
+    srcs: ["spatializer_tests.cpp"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index e02c93a..afd8765 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1369,6 +1369,7 @@
     AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
             String8(mixAddress.c_str()), 0);
     myAudioMix.mDeviceType = deviceType;
+    myAudioMix.mToken = sp<BBinder>::make();
     // Clear mAudioMix before add new one to make sure we don't add already exist mixes.
     mAudioMixes.clear();
     return addPolicyMix(myAudioMix);
@@ -1569,8 +1570,7 @@
     validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
 
     mAudioMixes.clear();
-    mAudioMixes.add(validAudioMix);
-    status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+    status_t ret = addPolicyMix(validAudioMix);
 
     ASSERT_EQ(NO_ERROR, ret);
 
@@ -1586,8 +1586,7 @@
                              MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
     validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
 
-    mAudioMixes.add(invalidAudioMix);
-    ret = mManager->registerPolicyMixes(mAudioMixes);
+    ret = addPolicyMix(invalidAudioMix);
 
     ASSERT_EQ(INVALID_OPERATION, ret);
 
@@ -1614,8 +1613,7 @@
     validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
 
     mAudioMixes.clear();
-    mAudioMixes.add(validAudioMix);
-    status_t ret = mManager->registerPolicyMixes(mAudioMixes);
+    status_t ret = addPolicyMix(validAudioMix);
 
     ASSERT_EQ(NO_ERROR, ret);
 
@@ -1629,7 +1627,7 @@
 
     AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
                              MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
-    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
 
     Vector<AudioMix> mixes;
     mixes.add(invalidAudioMix);
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
new file mode 100644
index 0000000..73bef43
--- /dev/null
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Spatializer_Test"
+
+#include "Spatializer.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
+#include <com_android_media_audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+
+class TestSpatializerPolicyCallback :
+        public SpatializerPolicyCallback {
+public:
+    void onCheckSpatializer() override {};
+};
+
+class SpatializerTest : public ::testing::Test {
+protected:
+    void SetUp() override {
+        const sp<EffectsFactoryHalInterface> effectsFactoryHal
+                = EffectsFactoryHalInterface::create();
+        mSpatializer = Spatializer::create(&mTestCallback, effectsFactoryHal);
+        if (mSpatializer == nullptr) {
+            GTEST_SKIP() << "Skipping Spatializer tests: no spatializer";
+        }
+        std::vector<Spatialization::Level> levels;
+        binder::Status status = mSpatializer->getSupportedLevels(&levels);
+        ASSERT_TRUE(status.isOk());
+        for (auto level : levels) {
+            if (level != Spatialization::Level::NONE) {
+                mSpatializer->setLevel(level);
+                break;
+            }
+        }
+        mSpatializer->setOutput(sTestOutput);
+    }
+
+    void TearDown() override {
+        if (mSpatializer == nullptr) {
+            return;
+        }
+        mSpatializer->setLevel(Spatialization::Level::NONE);
+        mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
+        mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
+        mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+        mSpatializer->updateActiveTracks(0);
+    }
+
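+    // Arbitrary non-zero handles standing in for a real output stream and head sensor.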
+    static constexpr audio_io_handle_t sTestOutput = 1977;
+    static constexpr int sTestSensorHandle = 1980;
+
+    const static inline std::vector<audio_latency_mode_t> sA2DPLatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_FREE
+    };
+    const static inline std::vector<audio_latency_mode_t> sBLELatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+        AUDIO_LATENCY_MODE_FREE
+    };
+
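+    // Picks the first supported head tracking mode other than DISABLED and attaches a test
+    // sensor handle; returns false when the spatializer does not support head tracking.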
+    bool setUpForHeadtracking() {
+        bool htSupported;
+        mSpatializer->isHeadTrackingSupported(&htSupported);
+        if (!htSupported) {
+            return false;
+        }
+
+        std::vector<HeadTracking::Mode> htModes;
+        mSpatializer->getSupportedHeadTrackingModes(&htModes);
+        for (auto htMode : htModes) {
+            if (htMode != HeadTracking::Mode::DISABLED) {
+                mSpatializer->setDesiredHeadTrackingMode(htMode);
+                break;
+            }
+        }
+
+        mSpatializer->setHeadSensor(sTestSensorHandle);
+        return true;
+    }
+
+    TestSpatializerPolicyCallback mTestCallback;
+    sp<Spatializer> mSpatializer;
+};
+
+TEST_F(SpatializerTest, SupportedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedA2dpLatencyTest: head tracking not supported";
+    }
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies == sA2DPLatencyModes);
+    // Free mode must always be the last entry in the ordered list
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, SupportedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: DSA over LE not enabled";
+    }
+    std::vector<audio_latency_mode_t> latencies = sBLELatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+    ASSERT_TRUE(std::find(supportedLatencies.begin(), supportedLatencies.end(),
+            AUDIO_LATENCY_MODE_LOW) != supportedLatencies.end());
+
+    std::vector<audio_latency_mode_t> orderedLowLatencyModes =
+        mSpatializer->getOrderedLowLatencyModes();
+
+    std::vector<audio_latency_mode_t> supportedLowLatencyModes;
+    // remove free mode at the end of the supported list to only retain low latency modes
+    std::copy(supportedLatencies.begin(),
+              supportedLatencies.begin() + supportedLatencies.size() - 1,
+              std::back_inserter(supportedLowLatencyModes));
+
+    // Verify that the supported low latency modes are always present in the ordered low
+    // latency modes list, and in the same order
+    std::vector<audio_latency_mode_t>::iterator lastIt = orderedLowLatencyModes.begin();
+    for (auto latency : supportedLowLatencyModes) {
+        auto it = std::find(orderedLowLatencyModes.begin(), orderedLowLatencyModes.end(), latency);
+        ASSERT_NE(it, orderedLowLatencyModes.end());
+        ASSERT_LE(lastIt, it);
+        lastIt = it;
+    }
+}
+
+TEST_F(SpatializerTest, RequestedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedA2dpLatencyTest: head tracking not supported";
+    }
+
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low if at least one spatialized track is active
+    mSpatializer->updateActiveTracks(1);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks(0);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, RequestedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: DSA over LE not enabled";
+    }
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low software if at least one spatialized track is active
+    // and the only supported low latency mode is low software
+    mSpatializer->updateActiveTracks(1);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    HeadTracking::ConnectionMode connectionMode = mSpatializer->getHeadtrackingConnectionMode();
+
+    // If low hardware mode is used, the spatializer must use one of the tunneled sensor
+    // connection modes.
+    // Otherwise, low software mode must be used.
+    if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
+        ASSERT_TRUE(connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL
+                        || connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW);
+    } else {
+        ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+    }
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks(0);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index b748888..c9e9090 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -263,7 +263,7 @@
         "liblog",
         "libutils",
         "libxml2",
-        "camera_platform_flags_c_lib"
+        "camera_platform_flags_c_lib",
     ],
 
     include_dirs: [
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index ebe771e..78c14b2 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -155,17 +155,18 @@
 CameraService::CameraService(
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils) :
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils == nullptr ?
+                std::make_shared<AttributionAndPermissionUtils>()\
+                : attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
                 std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
-        mAttributionAndPermissionUtils(attributionAndPermissionUtils == nullptr ?
-                std::make_shared<AttributionAndPermissionUtils>(this)\
-                : attributionAndPermissionUtils),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
         mNumberOfCameras(0),
         mNumberOfCamerasWithoutSystemCamera(0),
         mSoundRef(0), mInitialized(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
     ALOGI("CameraService started (pid=%d)", getpid());
+    mAttributionAndPermissionUtils->setCameraService(this);
     mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
     mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
     if (mMemFd == -1) {
@@ -707,14 +708,6 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-bool CameraService::isAutomotiveDevice() const {
-    return mAttributionAndPermissionUtils->isAutomotiveDevice();
-}
-
-bool CameraService::isAutomotivePrivilegedClient(int32_t uid) const {
-    return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
-}
-
 bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
     // Returns false if this is not an automotive device type.
     if (!isAutomotiveDevice())
@@ -760,49 +753,6 @@
     return true;
 }
 
-static AttributionSourceState attributionSourceFromPidAndUid(int callingPid, int callingUid) {
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    return attributionSource;
-}
-
-bool CameraService::hasPermissionsForCamera(int callingPid, int callingUid) const {
-    return hasPermissionsForCamera(std::string(), callingPid, callingUid);
-}
-
-bool CameraService::hasPermissionsForCamera(const std::string& cameraId, int callingPid,
-        int callingUid) const {
-    auto attributionSource = attributionSourceFromPidAndUid(callingPid, callingUid);
-    return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
-}
-
-bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
-        int callingUid, bool checkCameraPermissions) const {
-    auto attributionSource = attributionSourceFromPidAndUid(callingPid, callingUid);
-    return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
-                cameraId, attributionSource, checkCameraPermissions);
-}
-
-bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
-        int callingPid, int callingUid) const {
-    auto attributionSource = attributionSourceFromPidAndUid(callingPid, callingUid);
-    return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
-                cameraId, attributionSource);
-}
-
-bool CameraService::hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
-    auto attributionSource = attributionSourceFromPidAndUid(callingPid, callingUid);
-    return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
-            attributionSource);
-}
-
-bool CameraService::hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
-    auto attributionSource = attributionSourceFromPidAndUid(callingPid, callingUid);
-    return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
-            attributionSource);
-}
-
 Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
@@ -1353,6 +1303,7 @@
         int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
+
     const std::string cameraId = resolveCameraId(
         unresolvedCameraId, CameraThreadState::getCallingUid());
     if (!mInitialized) {
@@ -1681,16 +1632,6 @@
     return STATUS_ERROR(ERROR_INVALID_OPERATION, "Unable to initialize legacy parameters");
 }
 
-// Can camera service trust the caller based on the calling UID?
-bool CameraService::isTrustedCallingUid(uid_t uid) const {
-    return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
-}
-
-status_t CameraService::getUidForPackage(const std::string &packageName, int userId,
-        /*inout*/uid_t& uid, int err) const {
-    return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
-}
-
 Status CameraService::validateConnectLocked(const std::string& cameraId,
         const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
         /*out*/int& originalClientPid) const {
@@ -1755,7 +1696,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     // Check if we can trust clientPid
@@ -1768,7 +1709,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -1788,13 +1729,14 @@
     // If it's not calling from cameraserver, check the permission if the
     // device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
     // android.permission.SYSTEM_CAMERA for system only camera devices).
-    bool checkPermissionForCamera = hasPermissionsForCamera(cameraId, clientPid, clientUid);
+    bool checkPermissionForCamera =
+            hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
     // Make sure the UID is in an active state to use the camera
@@ -1805,7 +1747,7 @@
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
                 "calling UID %d proc state %" PRId32 ")",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str(),
                 callingUid, procState);
     }
 
@@ -1818,7 +1760,7 @@
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
-                "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
     // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
@@ -1844,14 +1786,14 @@
         // If the System User tries to access the camera when the device is running in
         // headless system user mode, ensure that client has the required permission
         // CAMERA_HEADLESS_SYSTEM_USER.
-        if (mAttributionAndPermissionUtils->isHeadlessSystemUserMode()
+        if (isHeadlessSystemUserMode()
                 && (clientUserId == USER_SYSTEM)
                 && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
             ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                     "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
                     User without camera headless system user permission",
-                    clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                    clientName.c_str(), clientPid, clientUid, cameraId.c_str());
         }
     }
 
@@ -2212,7 +2154,7 @@
     }
 
     // (1) Cameraserver trying to connect, accept.
-    if (mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating()) {
+    if (isCallerCameraServerNotDelegating()) {
         return false;
     }
     // (2)
@@ -2249,16 +2191,17 @@
     sp<CameraDeviceClient> client = nullptr;
     std::string clientPackageNameAdj = clientPackageName;
     int callingPid = CameraThreadState::getCallingPid();
+    int callingUid = CameraThreadState::getCallingUid();
     bool systemNativeClient = false;
     if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
-        std::string systemClient =
-                fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
+        std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
         clientPackageNameAdj = systemClient;
         systemNativeClient = true;
     }
+
     const std::string cameraId = resolveCameraId(
             unresolvedCameraId,
-            CameraThreadState::getCallingUid(),
+            callingUid,
             clientPackageNameAdj);
 
     if (oomScoreOffset < 0) {
@@ -2271,7 +2214,6 @@
     }
 
     userid_t clientUserId = multiuser_get_user_id(clientUid);
-    int callingUid = CameraThreadState::getCallingUid();
     if (clientUid == USE_CALLING_UID) {
         clientUserId = multiuser_get_user_id(callingUid);
     }
@@ -2288,8 +2230,8 @@
     // enforce system camera permissions
     if (oomScoreOffset > 0
             && !hasPermissionsForSystemCamera(cameraId, callingPid,
-                    CameraThreadState::getCallingUid())
-            && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+                    callingUid)
+            && !isTrustedCallingUid(callingUid)) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
                         clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2329,8 +2271,8 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            callingUid,
+            callingPid);
     }
     return ret;
 }
@@ -2353,12 +2295,8 @@
         return true;
     } else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
         return false;
-    } else if ((mSensorPrivacyPolicy->getCameraPrivacyState()
-            == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS) ||
-            (mSensorPrivacyPolicy->getCameraPrivacyState()
-            == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS) ||
-            (mSensorPrivacyPolicy->getCameraPrivacyState() ==
-            SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)) {
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+            == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
         if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
             return false;
         } else {
@@ -4015,7 +3953,7 @@
         const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, bool overrideToPortrait):
-        mAttributionAndPermissionUtils(attributionAndPermissionUtils),
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -4048,7 +3986,7 @@
         mAppOpsManager = std::make_unique<AppOpsManager>();
     }
 
-    mUidIsTrusted = mAttributionAndPermissionUtils->isTrustedCallingUid(mClientUid);
+    mUidIsTrusted = isTrustedCallingUid(mClientUid);
 }
 
 CameraService::BasicClient::~BasicClient() {
@@ -4787,7 +4725,7 @@
     }
     hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
-    if (mAttributionAndPermissionUtils->isAutomotiveDevice()) {
+    if (isAutomotiveDevice()) {
         mSpm.addToggleSensorPrivacyListener(this);
     }
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
@@ -4827,7 +4765,7 @@
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
-    if (mAttributionAndPermissionUtils->isAutomotiveDevice()) {
+    if (isAutomotiveDevice()) {
         mSpm.removeToggleSensorPrivacyListener(this);
     }
     mSpm.unlinkToDeath(this);
@@ -4903,9 +4841,7 @@
     // if sensor privacy is enabled then block all clients from accessing the camera
     if (state == SensorPrivacyManager::ENABLED) {
         service->blockAllClients();
-    } else if ((state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_APPS)
-            || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_HELPFUL_APPS)
-            || (state == SensorPrivacyManager::AUTOMOTIVE_DRIVER_ASSISTANCE_REQUIRED_APPS)) {
+    } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
         service->blockPrivacyEnabledClients();
     }
     return binder::Status::ok();
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 11cf1a1..1a887a1 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -78,7 +78,8 @@
     public virtual ::android::hardware::BnCameraService,
     public virtual IBinder::DeathRecipient,
     public virtual CameraProviderManager::StatusListener,
-    public virtual IServiceManager::LocalRegistrationCallback
+    public virtual IServiceManager::LocalRegistrationCallback,
+    public AttributionAndPermissionUtilsEncapsulator
 {
     friend class BinderService<CameraService>;
     friend class CameraOfflineSessionClient;
@@ -317,13 +318,6 @@
     // Shared utilities
     static binder::Status filterGetInfoErrorCode(status_t err);
 
-    bool isAutomotiveDevice() const;
-
-    /**
-     * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
-     */
-    bool isAutomotivePrivilegedClient(int32_t uid) const;
-
     /**
      * Returns true if the device is an automotive device and cameraId is system
      * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
@@ -335,7 +329,9 @@
     /////////////////////////////////////////////////////////////////////
     // CameraClient functionality
 
-    class BasicClient : public virtual RefBase {
+    class BasicClient :
+        public virtual RefBase,
+        public AttributionAndPermissionUtilsEncapsulator {
     friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
@@ -460,8 +456,6 @@
 
         virtual ~BasicClient();
 
-        std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
-
         // the instance is in the middle of destruction. When this is set,
         // the instance should not be accessed from callback.
         // CameraService's mClientLock should be acquired to access this.
@@ -681,20 +675,6 @@
         return activityManager;
     }
 
-    bool hasPermissionsForCamera(int callingPid, int callingUid) const;
-
-    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid) const;
-
-    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
-            bool checkCameraPermissions = true) const;
-
-    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
-            int callingUid) const;
-
-    bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const;
-
-    bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const;
-
    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -879,12 +859,13 @@
     // prevented from accessing the camera.
     class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
             public virtual IBinder::DeathRecipient,
-            public virtual IServiceManager::LocalRegistrationCallback {
+            public virtual IServiceManager::LocalRegistrationCallback,
+            public AttributionAndPermissionUtilsEncapsulator {
         public:
             explicit SensorPrivacyPolicy(wp<CameraService> service,
                     std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
-                    : mService(service),
-                      mAttributionAndPermissionUtils(attributionAndPermissionUtils),
+                    : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+                      mService(service),
                       mSensorPrivacyEnabled(false),
                     mCameraPrivacyState(SensorPrivacyManager::DISABLED), mRegistered(false) {}
 
@@ -909,7 +890,6 @@
         private:
             SensorPrivacyManager mSpm;
             wp<CameraService> mService;
-            std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
             int mCameraPrivacyState;
@@ -924,7 +904,6 @@
     sp<SensorPrivacyPolicy> mSensorPrivacyPolicy;
 
     std::shared_ptr<CameraServiceProxyWrapper> mCameraServiceProxyWrapper;
-    std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
 
     // Delay-load the Camera HAL module
     virtual void onFirstRef();
@@ -937,11 +916,6 @@
     void addStates(const std::string& id);
     void removeStates(const std::string& id);
 
-    bool isTrustedCallingUid(uid_t uid) const;
-
-    status_t getUidForPackage(const std::string &packageName, int userId,
-            /*inout*/uid_t& uid, int err) const;
-
     // Check if we can connect, before we acquire the service lock.
     // The returned originalClientPid is the PID of the original process that wants to connect to
     // camera.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ac4b039..fa063b8 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -1060,7 +1060,7 @@
         static const nsecs_t kRequestTimeout = 50e6; // 50 ms
 
         // TODO: does this need to be adjusted for long exposure requests?
-        static const nsecs_t kRequestSubmitTimeout = 200e6; // 200 ms
+        static const nsecs_t kRequestSubmitTimeout = 500e6; // 500 ms
 
         // Used to prepare a batch of requests.
         struct NextRequest {
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index dc4cfb1..7f4a1bd 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -33,10 +33,13 @@
  */
 class AttributionAndPermissionUtils {
 public:
-    AttributionAndPermissionUtils(wp<CameraService> cameraService) : mCameraService(cameraService)
-            {}
+    AttributionAndPermissionUtils() { }
     virtual ~AttributionAndPermissionUtils() {}
 
+    void setCameraService(wp<CameraService> cameraService) {
+        mCameraService = cameraService;
+    }
+
     /**
      * Pre-grants the permission if the attribution source uid is for an automotive
      * privileged client. Otherwise uses system service permission checker to check
@@ -49,10 +52,18 @@
     virtual bool checkPermissionForPreflight(const std::string &cameraId,
             const std::string &permission, const AttributionSourceState& attributionSource,
             const std::string& message, int32_t attributedOpCode);
+
+    // Can camera service trust the caller based on the calling UID?
     virtual bool isTrustedCallingUid(uid_t uid);
+
     virtual bool isAutomotiveDevice();
     virtual bool isHeadlessSystemUserMode();
+
+    /**
+     * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
+     */
     virtual bool isAutomotivePrivilegedClient(int32_t uid);
+
     virtual status_t getUidForPackage(const std::string &packageName, int userId,
             /*inout*/uid_t& uid, int err);
     virtual bool isCallerCameraServerNotDelegating();
@@ -86,6 +97,107 @@
             const AttributionSourceState &attributionSource);
 };
 
+/**
+ * Class to be inherited by classes encapsulating AttributionAndPermissionUtils. Provides an
+ * additional utility layer above AttributionAndPermissionUtils calls, and avoids verbosity
+ * in the encapsulating class's methods.
+ */
+class AttributionAndPermissionUtilsEncapsulator {
+protected:
+    std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
+
+public:
+    AttributionAndPermissionUtilsEncapsulator(
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+            : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            const std::string& packageName) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.packageName = packageName;
+        return attributionSource;
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid);
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid,
+            const std::string& packageName) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
+            int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
+            const std::string& packageName) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
+            bool checkCameraPermissions = true) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
+                    cameraId, attributionSource, checkCameraPermissions);
+    }
+
+    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+            int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
+                    cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
+                attributionSource);
+    }
+
+    bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
+                attributionSource);
+    }
+
+    bool isAutomotiveDevice() const {
+        return mAttributionAndPermissionUtils->isAutomotiveDevice();
+    }
+
+    bool isAutomotivePrivilegedClient(int32_t uid) const {
+        return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
+    }
+
+    bool isTrustedCallingUid(uid_t uid) const {
+        return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
+    }
+
+    bool isHeadlessSystemUserMode() const {
+        return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
+    }
+
+    status_t getUidForPackage(const std::string &packageName, int userId,
+            /*inout*/uid_t& uid, int err) const {
+        return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
+    }
+
+    bool isCallerCameraServerNotDelegating() const {
+        return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
+    }
+};
+
 } // namespace android
 
 #endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 25fca73..e976704 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -9,7 +9,8 @@
         {
           "exclude-annotation": "androidx.test.filters.FlakyTest"
         }
-      ]
+      ],
+      "keywords": ["primary-device"]
     }
   ]
 }
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 7636cbd..ddb245a 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -124,9 +124,9 @@
          kOutputFormats.end();
 }
 
-std::vector<MetadataBuilder::FpsRange> fpsRangesForInputConfig(
+std::vector<FpsRange> fpsRangesForInputConfig(
     const std::vector<SupportedStreamConfiguration>& configs) {
-  std::set<MetadataBuilder::FpsRange> availableRanges;
+  std::set<FpsRange> availableRanges;
 
   for (const SupportedStreamConfiguration& config : configs) {
     availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
@@ -141,8 +141,7 @@
     availableRanges.insert({.minFps = 30, .maxFps = 30});
   }
 
-  return std::vector<MetadataBuilder::FpsRange>(availableRanges.begin(),
-                                                availableRanges.end());
+  return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
 }
 
 std::optional<Resolution> getMaxResolution(
@@ -301,14 +300,41 @@
                                     ANDROID_JPEG_THUMBNAIL_QUALITY,
                                     ANDROID_NOISE_REDUCTION_MODE,
                                     ANDROID_STATISTICS_FACE_DETECT_MODE})
-          .setAvailableResultKeys(
-              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE, ANDROID_CONTROL_AE_MODE,
-               ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
-               ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_EFFECT_MODE,
-               ANDROID_CONTROL_MODE, ANDROID_FLASH_MODE, ANDROID_FLASH_STATE,
-               ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, ANDROID_JPEG_QUALITY,
-               ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_LENS_FOCAL_LENGTH,
-               ANDROID_SENSOR_TIMESTAMP, ANDROID_NOISE_REDUCTION_MODE})
+          .setAvailableResultKeys({
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+              ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+              ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+              ANDROID_CONTROL_AE_LOCK,
+              ANDROID_CONTROL_AE_MODE,
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+              ANDROID_CONTROL_AE_STATE,
+              ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+              ANDROID_CONTROL_AF_MODE,
+              ANDROID_CONTROL_AF_STATE,
+              ANDROID_CONTROL_AF_TRIGGER,
+              ANDROID_CONTROL_AWB_LOCK,
+              ANDROID_CONTROL_AWB_MODE,
+              ANDROID_CONTROL_AWB_STATE,
+              ANDROID_CONTROL_CAPTURE_INTENT,
+              ANDROID_CONTROL_EFFECT_MODE,
+              ANDROID_CONTROL_MODE,
+              ANDROID_CONTROL_SCENE_MODE,
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+              ANDROID_FLASH_MODE,
+              ANDROID_FLASH_STATE,
+              ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+              ANDROID_JPEG_QUALITY,
+              ANDROID_JPEG_THUMBNAIL_QUALITY,
+              ANDROID_LENS_FOCAL_LENGTH,
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+              ANDROID_NOISE_REDUCTION_MODE,
+              ANDROID_REQUEST_PIPELINE_DEPTH,
+              ANDROID_SENSOR_TIMESTAMP,
+              ANDROID_STATISTICS_FACE_DETECT_MODE,
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+              ANDROID_STATISTICS_SCENE_FLICKER,
+          })
           .setAvailableCapabilities(
               {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
 
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index c274dc9..4c50041 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -24,6 +24,7 @@
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "system/camera_metadata.h"
 #include "util/Util.h"
 
 namespace android {
@@ -121,6 +122,9 @@
   // Default JPEG compression quality.
   static constexpr uint8_t kDefaultJpegQuality = 80;
 
+  static constexpr camera_metadata_enum_android_control_capture_intent_t
+      kDefaultCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
 
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 9b0fc07..6e89b5f 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -92,30 +92,62 @@
     const std::chrono::nanoseconds timestamp,
     const RequestSettings& requestSettings,
     const Resolution reportedSensorSize) {
-  std::unique_ptr<CameraMetadata> metadata =
+  // All of the keys used in the response need to be referenced in
+  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+  // in VirtualCameraDevice.cc).
+  MetadataBuilder builder =
       MetadataBuilder()
           .setAberrationCorrectionMode(
               ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+          .setControlAeExposureCompensation(0)
+          .setControlAeLockAvailable(false)
+          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
           .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
           .setControlAePrecaptureTrigger(
               ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
           .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
           .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+          .setControlCaptureIntent(requestSettings.captureIntent)
           .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
           .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+          .setControlVideoStabilizationMode(
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
           .setCropRegion(0, 0, reportedSensorSize.width,
                          reportedSensorSize.height)
           .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
           .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
           .setFocalLength(VirtualCameraDevice::kFocalLength)
           .setJpegQuality(requestSettings.jpegQuality)
           .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
                                 requestSettings.thumbnailResolution.height)
           .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setLensOpticalStabilizationMode(
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
           .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
           .setPipelineDepth(kPipelineDepth)
           .setSensorTimestamp(timestamp)
-          .build();
+          .setStatisticsHotPixelMapMode(
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+          .setStatisticsLensShadingMapMode(
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+  if (requestSettings.fpsRange.has_value()) {
+    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+  }
+
+  std::unique_ptr<CameraMetadata> metadata = builder.build();
+
   if (metadata == nullptr) {
     ALOGE("%s: Failed to build capture result metadata", __func__);
     return CameraMetadata();
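
The comment at the top of this hunk captures the invariant behind the longer availableResultKeys list in VirtualCameraDevice.cc: every tag written into the capture result must also be advertised there. A hedged sketch of a consistency check a test could perform; the helper is hypothetical and only uses the camera_metadata C API this patch already relies on (0 is the OK return code):

    #include <set>

    #include "system/camera_metadata.h"

    // Hypothetical test helper: verifies that every entry in 'result' uses a
    // tag contained in 'declaredResultKeys' (the availableResultKeys list).
    bool resultKeysAreDeclared(const camera_metadata_t* result,
                               const std::set<int32_t>& declaredResultKeys) {
      const size_t entryCount = get_camera_metadata_entry_count(result);
      for (size_t i = 0; i < entryCount; ++i) {
        camera_metadata_ro_entry_t entry;
        if (get_camera_metadata_ro_entry(result, i, &entry) != 0) {
          return false;  // Malformed metadata.
        }
        if (declaredResultKeys.count(static_cast<int32_t>(entry.tag)) == 0) {
          return false;  // The result carries a tag the device never declared.
        }
      }
      return true;
    }
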
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 86dad0b..e8e0bd4 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -33,6 +33,7 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -58,6 +59,9 @@
   int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
   Resolution thumbnailResolution = Resolution(0, 0);
   int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+  std::optional<FpsRange> fpsRange = {};
+  camera_metadata_enum_android_control_capture_intent_t captureIntent =
+      VirtualCameraDevice::kDefaultCaptureIntent;
 };
 
 // Represents single capture request to fill set of buffers.
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 2a691c1..a0bb72e 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -148,7 +148,7 @@
           .setControlMode(ANDROID_CONTROL_MODE_AUTO)
           .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
           .setControlAeExposureCompensation(0)
-          .setControlAeTargetFpsRange(maxFps, maxFps)
+          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
           .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
           .setControlAePrecaptureTrigger(
               ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
@@ -260,7 +260,10 @@
       .thumbnailResolution =
           getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
       .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
-          VirtualCameraDevice::kDefaultJpegQuality)};
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .fpsRange = getFpsRange(metadata),
+      .captureIntent = getCaptureIntent(metadata).value_or(
+          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW)};
 }
 
 }  // namespace
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
index 5fa53d8..5fa8852 100644
--- a/services/camera/virtualcamera/flags/Android.bp
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -35,27 +35,3 @@
     export_include_dirs: ["."],
     defaults: ["virtual_device_build_flags_defaults"],
 }
-
-soong_config_module_type {
-    name: "virtual_device_build_flags_java_library",
-    module_type: "java_library",
-    config_namespace: "vdm",
-    bool_variables: [
-        "virtual_camera_service_enabled",
-    ],
-    properties: [
-        "srcs",
-    ],
-}
-
-virtual_device_build_flags_java_library {
-    name: "virtual_device_build_flag_java",
-    soong_config_variables: {
-        virtual_camera_service_enabled: {
-            srcs: ["java/enabled/**/*.java"],
-            conditions_default: {
-                srcs: ["java/disabled/**/*.java"],
-            },
-        },
-    },
-}
diff --git a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
deleted file mode 100644
index 128d93c..0000000
--- a/services/camera/virtualcamera/flags/java/disabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.companion.virtualdevice.flags;
-
-/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag isn't set.*/
-public class VirtualCameraServiceBuildFlag {
-
-    public static boolean isVirtualCameraServiceBuildFlagEnabled() {
-        return false;
-    }
-}
diff --git a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java b/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
deleted file mode 100644
index 02816fb..0000000
--- a/services/camera/virtualcamera/flags/java/enabled/android/companion/virtualdevice/flags/VirtualCameraServiceBuildFlag.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.companion.virtualdevice.flags;
-
-/** This file is included only if RELEASE_PACKAGE_VIRTUAL_CAMERA build flag is set.*/
-public class VirtualCameraServiceBuildFlag {
-
-    public static boolean isVirtualCameraServiceBuildFlagEnabled() {
-        return true;
-    }
-}
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
index e3d9e28..119260f 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -23,6 +23,7 @@
 #include <cstdint>
 #include <iterator>
 #include <memory>
+#include <optional>
 #include <utility>
 #include <variant>
 #include <vector>
@@ -59,7 +60,7 @@
 }  // namespace
 
 MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
-    camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+    const camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
   mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
       asVectorOf<uint8_t>(hwLevel);
   return *this;
@@ -86,7 +87,7 @@
 }
 
 MetadataBuilder& MetadataBuilder::setLensFacing(
-    camera_metadata_enum_android_lens_facing lensFacing) {
+    const camera_metadata_enum_android_lens_facing lensFacing) {
   mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
   return *this;
 }
@@ -175,6 +176,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlSceneMode(
+    const camera_metadata_enum_android_control_scene_mode sceneMode) {
+  mEntryMap[ANDROID_CONTROL_SCENE_MODE] = asVectorOf<uint8_t>(sceneMode);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
     const std::vector<camera_metadata_enum_android_control_effect_mode>&
         availableEffects) {
@@ -198,6 +205,14 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlVideoStabilizationMode(
+    const camera_metadata_enum_android_control_video_stabilization_mode
+        stabilizationMode) {
+  mEntryMap[ANDROID_CONTROL_VIDEO_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(stabilizationMode);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
     const std::vector<camera_metadata_enum_android_control_af_mode_t>&
         availableModes) {
@@ -212,6 +227,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlAfState(
+    const camera_metadata_enum_android_control_af_state afState) {
+  mEntryMap[ANDROID_CONTROL_AF_STATE] = asVectorOf<uint8_t>(afState);
+  return *this;
+}
+
 // See ANDROID_CONTROL_AF_TRIGGER_MODE in CameraMetadataTag.aidl.
 MetadataBuilder& MetadataBuilder::setControlAfTrigger(
     const camera_metadata_enum_android_control_af_trigger_t trigger) {
@@ -232,14 +253,14 @@
 }
 
 MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
-    const int32_t minFps, const int32_t maxFps) {
+    const FpsRange fpsRange) {
   mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
-      std::vector<int32_t>({minFps, maxFps});
+      std::vector<int32_t>({fpsRange.minFps, fpsRange.maxFps});
   return *this;
 }
 
 MetadataBuilder& MetadataBuilder::setControlAeMode(
-    camera_metadata_enum_android_control_ae_mode_t mode) {
+    const camera_metadata_enum_android_control_ae_mode_t mode) {
   mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
   return *this;
 }
@@ -277,6 +298,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlAwbState(
+    const camera_metadata_enum_android_control_awb_state awbState) {
+  mEntryMap[ANDROID_CONTROL_AWB_STATE] = asVectorOf<uint8_t>(awbState);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
     const bool awbLockAvailable) {
   const uint8_t lockAvailable = awbLockAvailable
@@ -287,6 +314,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlAwbLock(
+    const camera_metadata_enum_android_control_awb_lock awbLock) {
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK] = asVectorOf<uint8_t>(awbLock);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
     const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
         antibandingModes) {
@@ -313,6 +346,12 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlAeLock(
+    const camera_metadata_enum_android_control_ae_lock aeLock) {
+  mEntryMap[ANDROID_CONTROL_AE_LOCK] = asVectorOf<uint8_t>(aeLock);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setControlAeRegions(
     const std::vector<ControlRegion>& aeRegions) {
   std::vector<int32_t> regions;
@@ -421,7 +460,7 @@
 }
 
 MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
-    camera_metadata_enum_android_sync_max_latency latency) {
+    const camera_metadata_enum_android_sync_max_latency latency) {
   mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
   return *this;
 }
@@ -506,7 +545,7 @@
 }
 
 MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
-    camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+    const camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
   mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
       asVectorOf<uint8_t>(noiseReductionMode);
   return *this;
@@ -585,6 +624,43 @@
   return *this;
 }
 
+MetadataBuilder& MetadataBuilder::setControlAeState(
+    const camera_metadata_enum_android_control_ae_state aeState) {
+  mEntryMap[ANDROID_CONTROL_AE_STATE] = asVectorOf<uint8_t>(aeState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsSceneFlicker(
+    const camera_metadata_enum_android_statistics_scene_flicker sceneFlicker) {
+  mEntryMap[ANDROID_STATISTICS_SCENE_FLICKER] =
+      asVectorOf<uint8_t>(sceneFlicker);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsHotPixelMapMode(
+    const camera_metadata_enum_android_statistics_hot_pixel_map_mode
+        hotPixelMapMode) {
+  mEntryMap[ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE] =
+      asVectorOf<uint8_t>(hotPixelMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsLensShadingMapMode(
+    const camera_metadata_enum_android_statistics_lens_shading_map_mode
+        lensShadingMapMode) {
+  mEntryMap[ANDROID_STATISTICS_LENS_SHADING_MAP_MODE] =
+      asVectorOf<uint8_t>(lensShadingMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensOpticalStabilizationMode(
+    const camera_metadata_enum_android_lens_optical_stabilization_mode_t
+        opticalStabilizationMode) {
+  mEntryMap[ANDROID_LENS_OPTICAL_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(opticalStabilizationMode);
+  return *this;
+}
+
 MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
     const std::vector<int32_t>& keys) {
   mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
@@ -724,6 +800,38 @@
   return thumbnailSizes;
 }
 
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) != OK ||
+      entry.count != 2) {
+    return {};
+  }
+
+  FpsRange range{.minFps = entry.data.i32[0], .maxFps = entry.data.i32[1]};
+  return range;
+}
+
+std::optional<camera_metadata_enum_android_control_capture_intent>
+getCaptureIntent(const aidl::android::hardware::camera::device::CameraMetadata&
+                     cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_CAPTURE_INTENT,
+                                    &entry) != OK) {
+    return {};
+  }
+
+  return static_cast<camera_metadata_enum_android_control_capture_intent>(
+      entry.data.u8[0]);
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
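
getFpsRange and getCaptureIntent are the read-side counterparts of the builder setters above: each returns an empty optional when the tag is absent, and getFpsRange also rejects entries that do not hold exactly two values. A quick round-trip sketch, assuming MetadataBuilder::build() returns the AIDL CameraMetadata wrapper used throughout this file:

    #include <memory>
    #include <optional>

    #include "util/MetadataUtil.h"

    using namespace android::companion::virtualcamera;

    void metadataRoundTripExample() {
      auto metadata = MetadataBuilder()
                          .setControlAeTargetFpsRange(FpsRange{15, 30})
                          .setControlCaptureIntent(
                              ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW)
                          .build();
      if (metadata == nullptr) {
        return;  // build() already logs the failure.
      }
      std::optional<FpsRange> range = getFpsRange(*metadata);
      // Here range->minFps == 15 and range->maxFps == 30.
      std::optional<camera_metadata_enum_android_control_capture_intent> intent =
          getCaptureIntent(*metadata);
      // Here intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW.
    }
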
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
index b4d60cb..f6848cd 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -59,16 +59,6 @@
     int32_t weight = 0;
   };
 
-  struct FpsRange {
-    int32_t minFps;
-    int32_t maxFps;
-
-    bool operator<(const FpsRange& other) const {
-      return maxFps == other.maxFps ? minFps < other.minFps
-                                    : maxFps < other.maxFps;
-    }
-  };
-
   MetadataBuilder() = default;
   ~MetadataBuilder() = default;
 
@@ -193,6 +183,10 @@
       const std::vector<camera_metadata_enum_android_control_scene_mode>&
           availableSceneModes);
 
+  // See ANDROID_CONTROL_SCENE_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlSceneMode(
+      camera_metadata_enum_android_control_scene_mode sceneMode);
+
   // See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
   MetadataBuilder& setControlAvailableEffects(
       const std::vector<camera_metadata_enum_android_control_effect_mode>&
@@ -202,12 +196,17 @@
   MetadataBuilder& setControlEffectMode(
       camera_metadata_enum_android_control_effect_mode_t effectMode);
 
-  // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES
+  // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraMetadataTag.aidl.
   MetadataBuilder& setControlAvailableVideoStabilizationModes(
       const std::vector<
           camera_metadata_enum_android_control_video_stabilization_mode_t>&
           videoStabilizationModes);
 
+  // See ANDROID_CONTROL_VIDEO_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlVideoStabilizationMode(
+      camera_metadata_enum_android_control_video_stabilization_mode
+          stabilizationMode);
+
   // See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
   MetadataBuilder& setControlAeAvailableAntibandingModes(
       const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
@@ -256,7 +255,7 @@
       const std::vector<FpsRange>& fpsRanges);
 
   // See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
-  MetadataBuilder& setControlAeTargetFpsRange(int32_t min, int32_t max);
+  MetadataBuilder& setControlAeTargetFpsRange(FpsRange fpsRange);
 
   // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
   MetadataBuilder& setControlCaptureIntent(
@@ -278,9 +277,21 @@
   // See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
   MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
 
+  // See CONTROL_AWB_LOCK in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbLock(
+      camera_metadata_enum_android_control_awb_lock awbLock);
+
   // See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
   MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
 
+  // See CONTROL_AE_LOCK in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLock(
+      camera_metadata_enum_android_control_ae_lock aeLock);
+
+  // See CONTROL_AE_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeState(
+      camera_metadata_enum_android_control_ae_state aeState);
+
   // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
   MetadataBuilder& setControlAeRegions(
       const std::vector<ControlRegion>& aeRegions);
@@ -289,6 +300,10 @@
   MetadataBuilder& setControlAwbRegions(
       const std::vector<ControlRegion>& awbRegions);
 
+  // See ANDROID_CONTROL_AWB_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbState(
+      camera_metadata_enum_android_control_awb_state awbState);
+
   // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
   MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
                                  int32_t height);
@@ -297,6 +312,10 @@
   MetadataBuilder& setControlAfRegions(
       const std::vector<ControlRegion>& afRegions);
 
+  // See ANDROID_CONTROL_AF_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfState(
+      camera_metadata_enum_android_control_af_state afState);
+
   // The size of the compressed JPEG image, in bytes.
   //
   // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
@@ -342,6 +361,24 @@
   // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
   MetadataBuilder& setControlZoomRatioRange(float min, float max);
 
+  // See ANDROID_STATISTICS_SCENE_FLICKER in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsSceneFlicker(
+      camera_metadata_enum_android_statistics_scene_flicker sceneFlicker);
+
+  // See ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsHotPixelMapMode(
+      camera_metadata_enum_android_statistics_hot_pixel_map_mode mode);
+
+  // See ANDROID_STATISTICS_LENS_SHADING_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsLensShadingMapMode(
+      camera_metadata_enum_android_statistics_lens_shading_map_mode
+          lensShadingMapMode);
+
+  // See ANDROID_LENS_OPTICAL_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensOpticalStabilizationMode(
+      camera_metadata_enum_android_lens_optical_stabilization_mode_t
+          opticalStabilizationMode);
+
   // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
   MetadataBuilder& setAvailableRequestCapabilities(
       const std::vector<
@@ -410,6 +447,12 @@
 std::vector<Resolution> getJpegAvailableThumbnailSizes(
     const aidl::android::hardware::camera::device::CameraMetadata& metadata);
 
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_capture_intent> getCaptureIntent(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index faae010..f08eb1c 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -142,6 +142,16 @@
   int height = 0;
 };
 
+struct FpsRange {
+  int32_t minFps;
+  int32_t maxFps;
+
+  bool operator<(const FpsRange& other) const {
+    return maxFps == other.maxFps ? minFps < other.minFps
+                                  : maxFps < other.maxFps;
+  }
+};
+
 inline bool isApproximatellySameAspectRatio(const Resolution r1,
                                             const Resolution r2) {
   static constexpr float kAspectRatioEpsilon = 0.05;