[automerger skipped] Merge "libaudiohal@aidl: Synchronize commands sending" into main am: 82ad275673 am: e1ecae6c07 -s ours

am skip reason: Merged-In Ia29c6c4698499acc600f9b6fe02befddd5de5326 with SHA-1 914339da0b is already in history

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/3042653

Change-Id: I07be794d1fd040de36a75fe04dd7abd4f5b11fc6
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Android.bp b/camera/Android.bp
index 22f1633..d0f8e7e 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_license"],
 }
 
@@ -46,6 +47,7 @@
 aconfig_declarations {
     name: "camera_platform_flags",
     package: "com.android.internal.camera.flags",
+    container: "system",
     srcs: ["camera_platform.aconfig"],
 }
 
@@ -64,6 +66,7 @@
     name: "camera_headers",
     export_include_dirs: ["include"],
 }
+
 cc_library {
     name: "libcamera_client",
 
@@ -119,10 +122,14 @@
         "frameworks/native/include/media/openmax",
     ],
     export_include_dirs: [
-         "include",
-         "include/camera"
+        "include",
+        "include/camera",
     ],
-    export_shared_lib_headers: ["libcamera_metadata", "libnativewindow", "libgui"],
+    export_shared_lib_headers: [
+        "libcamera_metadata",
+        "libnativewindow",
+        "libgui",
+    ],
 
     cflags: [
         "-Werror",
@@ -152,7 +159,7 @@
 
     export_include_dirs: [
         "include",
-        "include/camera"
+        "include/camera",
     ],
 }
 
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 6b040ab..5d32871 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -71,10 +71,11 @@
 
 sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
         int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
-        bool forceSlowJpegMode)
+        bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
 {
     return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode);
+            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
+            devicePolicy);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 6759f3b..b2f7cc7 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -73,6 +73,9 @@
     if (res != OK) return res;
 
     res = parcel->writeString16(toString16(clientPackage));
+    if (res != OK) return res;
+
+    res = parcel->writeInt32(deviceId);
     return res;
 }
 
@@ -97,6 +100,7 @@
     if (res != OK) return res;
     clientPackage = toStdString(tempClientPackage);
 
+    res = parcel->readInt32(&deviceId);
     return res;
 }
 
@@ -123,7 +127,7 @@
     };
 
     sp<DeathNotifier>         gDeathNotifier;
-}; // namespace anonymous
+} // namespace anonymous
 
 ///////////////////////////////////////////////////////////
 // CameraBase definition
@@ -159,7 +163,8 @@
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
                                                const std::string& clientPackageName,
                                                int clientUid, int clientPid, int targetSdkVersion,
-                                               bool overrideToPortrait, bool forceSlowJpegMode)
+                                               bool overrideToPortrait, bool forceSlowJpegMode,
+                                               int32_t deviceId, int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -172,8 +177,8 @@
         ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
                 overrideToPortrait, forceSlowJpegMode);
         ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode,
-                 /*out*/ &c->mCamera);
+                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
+                devicePolicy, /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -252,7 +257,7 @@
 }
 
 template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras() {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
 
     if (!cs.get()) {
@@ -261,8 +266,8 @@
     }
     int32_t count;
     binder::Status res = cs->getNumberOfCameras(
-            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-            &count);
+            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+            devicePolicy, &count);
     if (!res.isOk()) {
         ALOGE("Error reading number of cameras: %s",
                 res.toString8().c_str());
@@ -274,11 +279,12 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        bool overrideToPortrait,
+        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, deviceId, devicePolicy,
+            cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 2e808d1..424923a 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -880,7 +880,7 @@
     return OK;
 }
 
-metadata_vendor_id_t CameraMetadata::getVendorId() {
+metadata_vendor_id_t CameraMetadata::getVendorId() const {
     return get_camera_metadata_vendor_id(mBuffer);
 }
 
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 057ec99..450bdd8 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -439,6 +439,16 @@
         return err;
     }
 
+    auto mostRequestedFpsRange = std::make_pair(0, 0);
+    if ((err = parcel->readInt32(&mostRequestedFpsRange.first)) != OK) {
+        ALOGE("%s: Failed to read frame rate range min info!", __FUNCTION__);
+        return err;
+    }
+    if ((err = parcel->readInt32(&mostRequestedFpsRange.second)) != OK) {
+        ALOGE("%s: Failed to read frame rate range max info!", __FUNCTION__);
+        return err;
+    }
+
     mCameraId = toStdString(id);
     mFacing = facing;
     mNewCameraState = newCameraState;
@@ -460,6 +470,7 @@
     mUsedZoomOverride = usedZoomOverride;
     mSessionIndex = sessionIdx;
     mCameraExtensionSessionStats = extStats;
+    mMostRequestedFpsRange = mostRequestedFpsRange;
 
     return OK;
 }
@@ -577,6 +588,16 @@
         return err;
     }
 
+    if ((err = parcel->writeInt32(mMostRequestedFpsRange.first)) != OK) {
+        ALOGE("%s: Failed to write frame rate range min info!", __FUNCTION__);
+        return err;
+    }
+
+    if ((err = parcel->writeInt32(mMostRequestedFpsRange.second)) != OK) {
+        ALOGE("%s: Failed to write frame rate range max info!", __FUNCTION__);
+        return err;
+    }
+
     return OK;
 }
 
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index fb26f83..c12a1a1 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -466,7 +466,7 @@
 
 int VendorTagDescriptorCache::getTagType(uint32_t tag,
         metadata_vendor_id_t id) const {
-    int ret = 0;
+    int ret = -1;
     auto desc = mVendorMap.find(id);
     if (desc != mVendorMap.end()) {
         ret = desc->second->getTagType(tag);
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 0eeeb7f..885749d 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -63,14 +63,33 @@
     const int CAMERA_TYPE_ALL = 1;
 
     /**
-     * Return the number of camera devices available in the system
+     * Return the number of camera devices available in the system.
+     *
+     * @param type The type of the camera; can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
+     *        or CAMERA_TYPE_ALL.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
      */
-    int getNumberOfCameras(int type);
+    int getNumberOfCameras(int type, int deviceId, int devicePolicy);
 
     /**
-     * Fetch basic camera information for a camera device
+     * Fetch basic camera information for a camera.
+     *
+     * @param cameraId The ID of the camera to fetch information for.
+     * @param overrideToPortrait Whether to override the sensor orientation information to
+     *        correspond to portrait.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return CameraInfo for the camera.
      */
-    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait);
+    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait, int deviceId,
+            int devicePolicy);
 
     /**
      * Default UID/PID values for non-privileged callers of
@@ -80,7 +99,20 @@
     const int USE_CALLING_PID = -1;
 
     /**
-     * Open a camera device through the old camera API
+     * Open a camera device through the old camera API.
+     *
+     * @param cameraId The ID of the camera to open.
+     * @param opPackageName The package name to report for the app-ops.
+     * @param clientUid UID for the calling client.
+     * @param clientPid PID for the calling client.
+     * @param overrideToPortrait Whether to override the sensor orientation information to
+     *        correspond to portrait.
+     * @param forceSlowJpegMode Whether to force slow jpeg mode.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
      */
     ICamera connect(ICameraClient client,
             int cameraId,
@@ -88,11 +120,24 @@
             int clientUid, int clientPid,
             int targetSdkVersion,
             boolean overrideToPortrait,
-            boolean forceSlowJpegMode);
+            boolean forceSlowJpegMode,
+            int deviceId,
+            int devicePolicy);
 
     /**
-     * Open a camera device through the new camera API
-     * Only supported for device HAL versions >= 3.2
+     * Open a camera device through the new camera API.
+     * Only supported for device HAL versions >= 3.2.
+     *
+     * @param cameraId The ID of the camera to open.
+     * @param opPackageName The package name to report for the app-ops.
+     * @param clientUid UID for the calling client.
+     * @param overrideToPortrait Whether to override the sensor orientation information to
+     *        correspond to portrait.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
@@ -100,7 +145,9 @@
             @nullable @utf8InCpp String featureId,
             int clientUid, int oomScoreOffset,
             int targetSdkVersion,
-            boolean overrideToPortrait);
+            boolean overrideToPortrait,
+            int deviceId,
+            int devicePolicy);
 
     /**
      * Add listener for changes to camera device and flashlight state.
@@ -168,9 +215,19 @@
     /**
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
+     *
+     * @param cameraId The ID of the camera to fetch metadata for.
+     * @param overrideToPortrait Whether to override the sensor orientation information to
+     *        correspond to portrait.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            boolean overrideToPortrait);
+            boolean overrideToPortrait, int deviceId, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -206,14 +263,46 @@
     ICameraInjectionSession injectCamera(@utf8InCpp String packageName, @utf8InCpp String internalCamId,
             @utf8InCpp String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
 
-    void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder);
+    /**
+     * Set the torch mode for a camera device.
+     *
+     * @param cameraId The ID of the camera to set torch mode for.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     */
+    void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
+            int deviceId, int devicePolicy);
 
-    // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
-    // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
-    void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel, IBinder clientBinder);
+    /**
+     * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
+     * If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
+     *
+     * @param cameraId The ID of the camera.
+     * @param strengthLevel The torch strength level to set for the camera.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     */
+    void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
+            IBinder clientBinder, int deviceId, int devicePolicy);
 
-    // Get the brightness level of the flash unit associated with cameraId.
-    int getTorchStrengthLevel(@utf8InCpp String cameraId);
+    /**
+     * Get the brightness level of the flash unit associated with cameraId.
+     *
+     * @param cameraId The ID of the camera.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return Torch strength level for the camera.
+     */
+    int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -274,18 +363,61 @@
     const int DEVICE_STATE_FOLDED = 4;
     const int DEVICE_STATE_LAST_FRAMEWORK_BIT = 0x80000000; // 1 << 31;
 
-    // Create a CaptureRequest metadata based on template id
-    CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId);
+    /**
+     * Create a CaptureRequest metadata based on template id
+     *
+     * @param cameraId The camera id to create the CaptureRequest for.
+     * @param templateId The template id to create the CaptureRequest for.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return Metadata representing the CaptureRequest.
+     */
+    CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
+            int deviceId, int devicePolicy);
 
     /**
-      * Check whether a particular session configuration with optional session parameters
-      * has camera device support.
-      *
-      * @param cameraId The camera id to query session configuration on
-      * @param sessionConfiguration Specific session configuration to be verified.
-      * @return true  - in case the stream combination is supported.
-      *         false - in case there is no device support.
-      */
+     * Check whether a particular session configuration with optional session parameters
+     * has camera device support.
+     *
+     * @param cameraId The camera id to query session configuration for.
+     * @param targetSdkVersion The target sdk level of the application calling this function.
+     * @param sessionConfiguration Specific session configuration to be verified.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return true  - in case the stream combination is supported.
+     *         false - in case there is no device support.
+     */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
-            in SessionConfiguration sessionConfiguration);
+            int targetSdkVersion, in SessionConfiguration sessionConfiguration,
+            int deviceId, int devicePolicy);
+
+    /**
+     * Get the camera characteristics for a particular session configuration for
+     * the given camera device.
+     *
+     * @param cameraId ID of the device for which the session characteristics must be fetched.
+     * @param targetSdkVersion The target sdk level of the application calling this function.
+     * @param overrideToPortrait Whether to override the sensor orientation information to
+     *                           correspond to portrait.
+     * @param sessionConfiguration Session configuration for which the characteristics
+     *                             must be fetched.
+     * @param deviceId The device id of the context associated with the caller.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for the default device context). Only virtual cameras are exposed
+     *                     for a custom policy, and only real cameras are exposed for the default
+     *                     policy.
+     * @return Characteristics associated with the given session.
+     */
+    CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
+            int targetSdkVersion,
+            boolean overrideToPortrait,
+            in SessionConfiguration sessionConfiguration,
+            int deviceId,
+            int devicePolicy);
 }
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 23a87d3..9c8c88a 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -51,14 +51,20 @@
     // Use to initialize variables only
     const int STATUS_UNKNOWN          = -1;
 
-    oneway void onStatusChanged(int status, @utf8InCpp String cameraId);
+    // The callbacks pass the deviceId associated with a camera, which is the id of the virtual
+    // device owning the camera (for virtual cameras), or kDefaultDeviceId (for real cameras).
+    // The deviceId is passed so that the API layer (CameraManagerGlobal) can filter out the
+    // cameras that don't correspond to the context associated with the caller who registers a
+    // callback.
+
+    oneway void onStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
 
     /**
      * Notify registered client about status changes for a physical camera backing
      * a logical camera.
      */
     oneway void onPhysicalCameraStatusChanged(int status, @utf8InCpp String cameraId,
-            @utf8InCpp String physicalCameraId);
+            @utf8InCpp String physicalCameraId, int deviceId);
 
     /**
      * The torch mode status of a camera.
@@ -82,9 +88,9 @@
     // Use to initialize variables only
     const int TORCH_STATUS_UNKNOWN = -1;
 
-    oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId);
+    oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId, int deviceId);
 
-    oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength);
+    oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength, int deviceId);
 
     /**
      * Notify registered clients about camera access priority changes.
@@ -98,6 +104,6 @@
      * Only clients with android.permission.CAMERA_OPEN_CLOSE_LISTENER permission
      * will receive such callbacks.
      */
-    oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId);
-    oneway void onCameraClosed(@utf8InCpp String cameraId);
+    oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId, int deviceId);
+    oneway void onCameraClosed(@utf8InCpp String cameraId, int deviceId);
 }
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 843e0d4..8e1fcc0 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -99,15 +99,6 @@
       */
     boolean isSessionConfigurationSupported(in SessionConfiguration sessionConfiguration);
 
-    /**
-     * Get the camera characteristics for a particular session configuration
-     *
-     * @param sessionConfiguration Specific session configuration for which the characteristics
-     * are fetched.
-     * @return - characteristics associated with the given session.
-     */
-    CameraMetadataNative getSessionCharacteristics(in SessionConfiguration sessionConfiguration);
-
     void deleteStream(int streamId);
 
     /**
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 73b153c..2d1af32 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -25,6 +25,7 @@
 #include <binder/Parcel.h>
 #include <gui/view/Surface.h>
 #include <system/camera_metadata.h>
+#include <system/graphics.h>
 #include <utils/String8.h>
 
 
@@ -102,6 +103,25 @@
     return mUseReadoutTimestamp;
 }
 
+int OutputConfiguration::getFormat() const {
+    return mFormat;
+}
+
+int OutputConfiguration::getDataspace() const {
+    return mDataspace;
+}
+
+int64_t OutputConfiguration::getUsage() const {
+    return mUsage;
+}
+
+bool OutputConfiguration::isComplete() const {
+    return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
+             mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
+             mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
+             mGbps.empty());
+}
+
 OutputConfiguration::OutputConfiguration() :
         mRotation(INVALID_ROTATION),
         mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@
         mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
         mTimestampBase(TIMESTAMP_BASE_DEFAULT),
         mMirrorMode(MIRROR_MODE_AUTO),
-        mUseReadoutTimestamp(false) {
+        mUseReadoutTimestamp(false),
+        mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
+        mDataspace(0),
+        mUsage(0) {
 }
 
 OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@
         return err;
     }
 
+    int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    if ((err = parcel->readInt32(&format)) != OK) {
+        ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int dataspace = 0;
+    if ((err = parcel->readInt32(&dataspace)) != OK) {
+        ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
+        return err;
+    }
+
+    int64_t usage = 0;
+    if ((err = parcel->readInt64(&usage)) != OK) {
+        ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
+        return err;
+    }
+
     mRotation = rotation;
     mSurfaceSetID = setID;
     mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@
     mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
     mDynamicRangeProfile = dynamicProfile;
     mColorSpace = colorSpace;
+    mFormat = format;
+    mDataspace = dataspace;
+    mUsage = usage;
 
     ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
-          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
+          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
+          "dataspace = %d, usage = %" PRId64,
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
           mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
-          mMirrorMode, mUseReadoutTimestamp);
+          mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);
 
     return err;
 }
@@ -283,6 +328,9 @@
     mTimestampBase = TIMESTAMP_BASE_DEFAULT;
     mMirrorMode = MIRROR_MODE_AUTO;
     mUseReadoutTimestamp = false;
+    mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    mDataspace = 0;
+    mUsage = 0;
 }
 
 OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@
     mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
     mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
     mTimestampBase(TIMESTAMP_BASE_DEFAULT),
-    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
+    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
+    mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
+    mUsage(0) { }
 
 status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
 
@@ -362,6 +412,15 @@
     err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
     if (err != OK) return err;
 
+    err = parcel->writeInt32(mFormat);
+    if (err != OK) return err;
+
+    err = parcel->writeInt32(mDataspace);
+    if (err != OK) return err;
+
+    err = parcel->writeInt64(mUsage);
+    if (err != OK) return err;
+
     return OK;
 }
 
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 5d2a263..4fcceae 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -1,8 +1,10 @@
 package: "com.android.internal.camera.flags"
+container: "system"
 
 flag {
      namespace: "camera_platform"
      name: "camera_hsum_permission"
+     is_exported: true
      description: "Camera access by headless system user"
      bug: "273539631"
 }
@@ -10,6 +12,7 @@
 flag {
      namespace: "camera_platform"
      name: "concert_mode"
+     is_exported: true
      description: "Introduces a new concert mode camera extension type"
      bug: "297083874"
 }
@@ -17,12 +20,20 @@
 flag {
      namespace: "camera_platform"
      name: "feature_combination_query"
+     is_exported: true
      description: "Query feature combination support and session specific characteristics"
      bug: "309627704"
 }
 
 flag {
      namespace: "camera_platform"
+     name: "watch_foreground_changes"
+     description: "Request AppOps to notify changes in the foreground status of the client"
+     bug: "290086710"
+}
+
+flag {
+     namespace: "camera_platform"
      name: "log_ultrawide_usage"
      description: "Enable measuring how much usage there is for ultrawide-angle cameras"
      bug: "300515796"
@@ -31,6 +42,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_manual_flash_strength_control"
+     is_exported: true
      description: "Flash brightness level control in manual flash mode"
      bug: "238348881"
 }
@@ -66,6 +78,7 @@
 flag {
      namespace: "camera_platform"
      name: "camera_ae_mode_low_light_boost"
+     is_exported: true
      description: "An AE mode that enables increased brightening in low light scenes"
      bug: "312803148"
 }
@@ -76,3 +89,131 @@
      description: "Enable creating MultiResolutionImageReader with usage flag configuration"
      bug: "301588215"
 }
+
+flag {
+     namespace: "camera_platform"
+     name: "use_ro_board_api_level_for_vndk_version"
+     description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
+     bug: "312315580"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_extensions_characteristics_get"
+     is_exported: true
+     description: "Enable get extension specific camera characteristics API"
+     bug: "280649914"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "delay_lazy_hal_instantiation"
+     description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
+     bug: "319735068"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "return_buffers_outside_locks"
+     description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+     bug: "315526878"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_device_setup"
+     is_exported: true
+     description: "Create an intermediate Camera Device class for limited CameraDevice access."
+     bug: "320741775"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "camera_privacy_allowlist"
+     is_exported: true
+     description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+     bug: "282814430"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "surface_ipc"
+     description: "Optimize Surface binder IPC"
+     bug: "323292530"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "extension_10_bit"
+     is_exported: true
+     description: "Enables 10-bit support in the camera extensions."
+     bug: "316375635"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "single_thread_executor"
+     description: "Ensure device logic is run within one thread."
+     bug: "305857746"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "surface_leak_fix"
+     description: "Address Surface release leaks in CaptureRequest"
+     bug: "324071855"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "concert_mode_api"
+     description: "Covers the eyes free videography public facing API"
+     bug: "297083874"
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "cache_permission_services"
+     description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
+     bug: "326139956"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "check_session_support_before_session_char"
+     description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
+     bug: "327008530"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "calculate_perf_override_during_session_support"
+     description: "Dynamically calculate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
+     bug: "332975108"
+     metadata {
+       purpose: PURPOSE_BUGFIX
+     }
+}
+
+flag {
+     namespace: "camera_platform"
+     name: "analytics_24q3"
+     description: "Miscellaneous camera platform metrics for 24Q3"
+     bug: "332557570"
+}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 13b705c..6862cb1 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/cameraserver/cameraserver.rc b/camera/cameraserver/cameraserver.rc
index e307653..63fa2b0 100644
--- a/camera/cameraserver/cameraserver.rc
+++ b/camera/cameraserver/cameraserver.rc
@@ -5,5 +5,6 @@
     ioprio rt 4
     task_profiles CameraServiceCapacity MaxPerformance
     rlimit rtprio 10 10
+    capabilities SYS_NICE
     onrestart class_restart cameraWatchdog
     interface aidl android.frameworks.cameraservice.service.ICameraService/default
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 6655f82..dfa53d2 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -27,6 +27,7 @@
 #include <camera/android/hardware/ICamera.h>
 #include <camera/android/hardware/ICameraClient.h>
 #include <camera/CameraBase.h>
+#include <camera/CameraUtils.h>
 
 namespace android {
 
@@ -58,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, bool, bool,
+        int, const std::string&, int, int, int, bool, bool, int32_t, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -82,7 +83,8 @@
     static  sp<Camera>  connect(int cameraId,
                                 const std::string& clientPackageName,
                                 int clientUid, int clientPid, int targetSdkVersion,
-                                bool overrideToPortrait, bool forceSlowJpegMode);
+                                bool overrideToPortrait, bool forceSlowJpegMode,
+                                int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
 
             virtual     ~Camera();
 
@@ -197,6 +199,6 @@
     friend class        CameraBase;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 6af7f2a..85ddbd6 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -62,16 +62,15 @@
 
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
-
 };
 
 /**
- * Basic status information about a camera device - its name and its current
+ * Basic status information about a camera device - its id and its current
  * state.
  */
 struct CameraStatus : public android::Parcelable {
     /**
-     * The name of the camera device
+     * The app-visible id of the camera device
      */
     std::string cameraId;
 
@@ -90,12 +89,19 @@
      */
     std::string clientPackage;
 
+    /**
+     * The id of the device owning the camera. For virtual cameras, this is the id of the virtual
+     * device owning the camera. For real cameras, this is the default device id, i.e.,
+     * kDefaultDeviceId.
+     */
+    int32_t deviceId;
+
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
 
     CameraStatus(std::string id, int32_t s, const std::vector<std::string>& unavailSubIds,
-            const std::string& clientPkg) : cameraId(id), status(s),
-            unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
+            const std::string& clientPkg, int32_t devId) : cameraId(id), status(s),
+            unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg), deviceId(devId) {}
     CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
 };
 
@@ -103,7 +109,6 @@
 
 using hardware::CameraInfo;
 
-
 template <typename TCam>
 struct CameraTraits {
 };
@@ -120,15 +125,18 @@
     static sp<TCam>      connect(int cameraId,
                                  const std::string& clientPackageName,
                                  int clientUid, int clientPid, int targetSdkVersion,
-                                 bool overrideToPortrait, bool forceSlowJpegMode);
+                                 bool overrideToPortrait, bool forceSlowJpegMode,
+                                 int32_t deviceId, int32_t devicePolicy);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
 
-    static int           getNumberOfCameras();
+    static int           getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
                                        bool overrideToPortrait,
+                                       int32_t deviceId,
+                                       int32_t devicePolicy,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
 
@@ -167,6 +175,6 @@
     typedef CameraBase<TCam>         CameraBaseT;
 };
 
-}; // namespace android
+} // namespace android
 
 #endif
diff --git a/camera/include/camera/CameraMetadata.h b/camera/include/camera/CameraMetadata.h
index c56ee6d..2903dfb 100644
--- a/camera/include/camera/CameraMetadata.h
+++ b/camera/include/camera/CameraMetadata.h
@@ -245,7 +245,7 @@
     /**
      * Return the current vendor tag id associated with this metadata.
      */
-    metadata_vendor_id_t getVendorId();
+    metadata_vendor_id_t getVendorId() const;
 
   private:
     camera_metadata_t *mBuffer;
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 06c154d..34ee882 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -167,6 +167,8 @@
 
     CameraExtensionSessionStats mCameraExtensionSessionStats;
 
+    std::pair<int32_t, int32_t> mMostRequestedFpsRange;
+
     // Constructors
     CameraSessionStats();
     CameraSessionStats(const std::string& cameraId, int facing, int newCameraState,
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index 31d25e7..d358407 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -26,6 +26,9 @@
 
 namespace android {
 
+// Device id of a context associated with the default device.
+constexpr int32_t kDefaultDeviceId = 0;
+
 /**
  * CameraUtils contains utility methods that are shared between the native
  * camera client, and the camera service.
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 3f74b4a..83ce39d 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -35,10 +35,13 @@
 
     static const int INVALID_ROTATION;
     static const int INVALID_SET_ID;
-    enum SurfaceType{
+    enum SurfaceType {
         SURFACE_TYPE_UNKNOWN = -1,
         SURFACE_TYPE_SURFACE_VIEW = 0,
-        SURFACE_TYPE_SURFACE_TEXTURE = 1
+        SURFACE_TYPE_SURFACE_TEXTURE = 1,
+        SURFACE_TYPE_MEDIA_RECORDER = 2,
+        SURFACE_TYPE_MEDIA_CODEC = 3,
+        SURFACE_TYPE_IMAGE_READER = 4
     };
     enum TimestampBaseType {
         TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@
     int                        getTimestampBase() const;
     int                        getMirrorMode() const;
     bool                       useReadoutTimestamp() const;
+    int                        getFormat() const;
+    int                        getDataspace() const;
+    int64_t                    getUsage() const;
+    bool                       isComplete() const;
 
     // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
     const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@
     OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
                         int rotation, const std::string& physicalCameraId,
                         int surfaceSetID = INVALID_SET_ID,
-                        int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
+                        int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
                         int height = 0, bool isShared = false);
 
     bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@
                 mStreamUseCase == other.mStreamUseCase &&
                 mTimestampBase == other.mTimestampBase &&
                 mMirrorMode == other.mMirrorMode &&
-                mUseReadoutTimestamp == other.mUseReadoutTimestamp);
+                mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
+                mFormat == other.mFormat &&
+                mDataspace == other.mDataspace &&
+                mUsage == other.mUsage);
     }
     bool operator != (const OutputConfiguration& other) const {
         return !(*this == other);
@@ -173,6 +183,15 @@
         if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
             return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
         }
+        if (mFormat != other.mFormat) {
+            return mFormat < other.mFormat;
+        }
+        if (mDataspace != other.mDataspace) {
+            return mDataspace < other.mDataspace;
+        }
+        if (mUsage != other.mUsage) {
+            return mUsage < other.mUsage;
+        }
         return gbpsLessThan(other);
     }
 
@@ -203,6 +222,9 @@
     int                        mTimestampBase;
     int                        mMirrorMode;
     bool                       mUseReadoutTimestamp;
+    int                        mFormat;
+    int                        mDataspace;
+    int64_t                    mUsage;
 };
 } // namespace params
 } // namespace camera2
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index d4dd546..421469a 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_camera_framework",
     default_applicable_licenses: ["frameworks_av_camera_ndk_license"],
 }
 
@@ -154,8 +155,8 @@
         "libcamera_metadata",
         "libmediandk",
         "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V2-ndk",
+        "android.frameworks.cameraservice.service-V2-ndk",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -188,7 +189,6 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
-        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 4387cc6..92de1e4 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -213,7 +213,7 @@
 EXPORT
 camera_status_t ACameraCaptureSession_prepareWindow(
         ACameraCaptureSession* session,
-        ACameraWindowType *window) {
+        ANativeWindow *window) {
     ATRACE_CALL();
     if (session == nullptr || window == nullptr) {
         ALOGE("%s: Error: session %p / window %p is null", __FUNCTION__, session, window);
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 8211671..f2ec573 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -124,7 +124,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionOutput_create(
-        ACameraWindowType* window, /*out*/ACaptureSessionOutput** out) {
+        ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || out == nullptr) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -137,7 +137,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_create(
-        ACameraWindowType* window, /*out*/ACaptureSessionOutput** out) {
+        ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || out == nullptr) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -150,7 +150,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionPhysicalOutput_create(
-        ACameraWindowType* window, const char* physicalId,
+        ANativeWindow* window, const char* physicalId,
         /*out*/ACaptureSessionOutput** out) {
     ATRACE_CALL();
     if (window == nullptr || physicalId == nullptr || out == nullptr) {
@@ -164,7 +164,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *out,
-        ACameraWindowType* window) {
+        ANativeWindow* window) {
     ATRACE_CALL();
     if ((window == nullptr) || (out == nullptr)) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
@@ -190,7 +190,7 @@
 
 EXPORT
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *out,
-        ACameraWindowType* window) {
+        ANativeWindow* window) {
     ATRACE_CALL();
     if ((window == nullptr) || (out == nullptr)) {
         ALOGE("%s: Error: bad argument. window %p, out %p",
diff --git a/camera/ndk/NdkCameraMetadata.cpp b/camera/ndk/NdkCameraMetadata.cpp
index 7d3a53e..a2dfaee 100644
--- a/camera/ndk/NdkCameraMetadata.cpp
+++ b/camera/ndk/NdkCameraMetadata.cpp
@@ -121,6 +121,18 @@
 }
 
 EXPORT
+camera_status_t ACameraMetadata_getTagFromName(
+        const ACameraMetadata* acm, const char* name, uint32_t* tag) {
+    ATRACE_CALL();
+    if (acm == nullptr || name == nullptr || tag == nullptr) {
+        ALOGE("%s: invalid argument! metadata %p, name %p, tag %p",
+               __FUNCTION__, acm, name, tag);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return acm->getTagFromName(name, tag);
+}
+
+EXPORT
 ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src) {
     ATRACE_CALL();
     if (src == nullptr) {
diff --git a/camera/ndk/NdkCaptureRequest.cpp b/camera/ndk/NdkCaptureRequest.cpp
index 87de4a9..b851a1d 100644
--- a/camera/ndk/NdkCaptureRequest.cpp
+++ b/camera/ndk/NdkCaptureRequest.cpp
@@ -27,7 +27,7 @@
 
 EXPORT
 camera_status_t ACameraOutputTarget_create(
-        ACameraWindowType* window, ACameraOutputTarget** out) {
+        ANativeWindow* window, ACameraOutputTarget** out) {
     ATRACE_CALL();
     if (window == nullptr) {
         ALOGE("%s: Error: input window is null", __FUNCTION__);
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index 73439c7..449c0b4 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -146,7 +146,7 @@
     return ret;
 }
 
-camera_status_t ACameraCaptureSession::prepare(ACameraWindowType* window) {
+camera_status_t ACameraCaptureSession::prepare(ANativeWindow* window) {
 #ifdef __ANDROID_VNDK__
     std::shared_ptr<acam::CameraDevice> dev = getDevicePtr();
 #else
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 88135ba..0d7a2c1 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -23,14 +23,14 @@
 
 #ifdef __ANDROID_VNDK__
 #include "ndk_vendor/impl/ACameraDevice.h"
-#include "ndk_vendor/impl/ACameraCaptureSessionVendor.h"
 #else
 #include "ACameraDevice.h"
+#endif
 
 using namespace android;
 
 struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(ACameraWindowType* window, bool isShared = false,
+    explicit ACaptureSessionOutput(ANativeWindow* window, bool isShared = false,
             const char* physicalCameraId = "") :
             mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
 
@@ -47,28 +47,27 @@
         return mWindow > other.mWindow;
     }
 
-    inline bool isWindowEqual(ACameraWindowType* window) const {
+    inline bool isWindowEqual(ANativeWindow* window) const {
         return mWindow == window;
     }
 
     // returns true if the window was successfully added, false otherwise.
-    inline bool addSharedWindow(ACameraWindowType* window) {
+    inline bool addSharedWindow(ANativeWindow* window) {
         auto ret = mSharedWindows.insert(window);
         return ret.second;
     }
 
     // returns the number of elements removed.
-    inline size_t removeSharedWindow(ACameraWindowType* window) {
+    inline size_t removeSharedWindow(ANativeWindow* window) {
         return mSharedWindows.erase(window);
     }
 
-    ACameraWindowType* mWindow;
-    std::set<ACameraWindowType *> mSharedWindows;
+    ANativeWindow* mWindow;
+    std::set<ANativeWindow*> mSharedWindows;
     bool           mIsShared;
     int            mRotation = CAMERA3_STREAM_ROTATION_0;
     std::string mPhysicalCameraId;
 };
-#endif
 
 struct ACaptureSessionOutputContainer {
     std::set<ACaptureSessionOutput> mOutputs;
@@ -147,7 +146,7 @@
         mPreparedCb.context = context;
         mPreparedCb.onWindowPrepared = cb;
     }
-    camera_status_t prepare(ACameraWindowType *window);
+    camera_status_t prepare(ANativeWindow *window);
 
     ACameraDevice* getDevice();
 
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 97d65b0..1fa71f4 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -341,7 +341,7 @@
     return ACAMERA_OK;
 }
 
-camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+camera_status_t CameraDevice::prepareLocked(ANativeWindow *window) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         return ret;
@@ -1097,7 +1097,7 @@
                     if (onWindowPrepared == nullptr) {
                         return;
                     }
-                    ACameraWindowType* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
@@ -1823,7 +1823,7 @@
         return ret;
     }
     // We've found the window corresponding to the surface id.
-    ACameraWindowType *window = it->second.first;
+    ANativeWindow *window = it->second.first;
     sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
     msg->setPointer(kContextKey, session->mPreparedCb.context);
     msg->setPointer(kAnwKey, window);
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 4658d18..2b9f327 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -151,7 +151,7 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
-    camera_status_t prepareLocked(ACameraWindowType *window);
+    camera_status_t prepareLocked(ANativeWindow *window);
 
     camera_status_t allocateCaptureRequest(
             const ACaptureRequest* request, sp<CaptureRequest>& outReq);
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 8c3424f..0744992 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -32,6 +32,9 @@
 
 namespace android {
 namespace acam {
+
+// TODO(b/291736219): Add device-awareness to ACameraManager.
+
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
 const char* CameraManagerGlobal::kPhysicalCameraIdKey   = "PhysicalCameraId";
@@ -125,6 +128,12 @@
         std::vector<hardware::CameraStatus> cameraStatuses{};
         mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
         for (auto& c : cameraStatuses) {
+            // Skip callback for cameras not belonging to the default device, as NDK doesn't support
+            // device awareness yet.
+            if (c.deviceId != kDefaultDeviceId) {
+                continue;
+            }
+
             onStatusChangedLocked(c.status, c.cameraId);
 
             for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
@@ -461,7 +470,13 @@
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+        int32_t status, const std::string& cameraId, int deviceId) {
+    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
+    // device awareness yet.
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
+
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         cm->onStatusChanged(status, cameraId);
@@ -472,7 +487,14 @@
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+        int32_t status, const std::string& cameraId, const std::string& physicalCameraId,
+        int deviceId) {
+    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
+    // device awareness yet.
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
+
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         cm->onStatusChanged(status, cameraId, physicalCameraId);
@@ -697,7 +719,8 @@
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
-            targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
+            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
             case hardware::ICameraService::ERROR_DISCONNECTED:
@@ -749,7 +772,8 @@
     binder::Status serviceRet = cs->connectDevice(
             callbacks, cameraId, "", {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
+            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
         ALOGE("%s: connect camera device failed: %s", __FUNCTION__, serviceRet.toString8().c_str());
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index c135d0f..c6e2bf9 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -87,23 +87,24 @@
     class CameraServiceListener final : public hardware::BnCameraServiceListener {
       public:
         explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-        virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId);
+        virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+                int32_t deviceId);
         virtual binder::Status onPhysicalCameraStatusChanged(int32_t status,
-                const std::string& cameraId, const std::string& physicalCameraId);
+                const std::string& cameraId, const std::string& physicalCameraId, int32_t deviceId);
 
         // Torch API not implemented yet
-        virtual binder::Status onTorchStatusChanged(int32_t, const std::string&) {
+        virtual binder::Status onTorchStatusChanged(int32_t, const std::string&, int32_t) {
             return binder::Status::ok();
         }
-        virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t) {
+        virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t, int32_t) {
             return binder::Status::ok();
         }
 
         virtual binder::Status onCameraAccessPrioritiesChanged();
-        virtual binder::Status onCameraOpened(const std::string&, const std::string&) {
+        virtual binder::Status onCameraOpened(const std::string&, const std::string&, int32_t) {
             return binder::Status::ok();
         }
-        virtual binder::Status onCameraClosed(const std::string&) {
+        virtual binder::Status onCameraClosed(const std::string&, int32_t) {
             return binder::Status::ok();
         }
 
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index b6b8012..69b30f7 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -18,6 +18,8 @@
 #define LOG_TAG "ACameraMetadata"
 
 #include "ACameraMetadata.h"
+
+#include <camera_metadata_hidden.h>
 #include <utils/Vector.h>
 #include <system/graphics.h>
 #include <media/NdkImage.h>
@@ -85,6 +87,19 @@
         filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
     }
     // TODO: filter request/result keys
+    const CameraMetadata& metadata = *mData;
+    const camera_metadata_t *rawMetadata = metadata.getAndLock();
+    metadata_vendor_id_t vendorTagId = get_camera_metadata_vendor_id(rawMetadata);
+    metadata.unlock(rawMetadata);
+    sp<VendorTagDescriptorCache> vtCache = VendorTagDescriptorCache::getGlobalVendorTagCache();
+    if (vtCache == nullptr) {
+        ALOGE("%s: error vendor tag descriptor cache is not initialized", __FUNCTION__);
+        return;
+    }
+    vtCache->getVendorTagDescriptor(vendorTagId, &mVTags);
+    if (mVTags == nullptr) {
+        ALOGE("%s: error retrieving vendor tag descriptor", __FUNCTION__);
+    }
 }
 
 bool
@@ -473,6 +488,13 @@
     return (*mData);
 }
 
+camera_status_t
+ACameraMetadata::getTagFromName(const char *name, uint32_t *tag) const {
+    Mutex::Autolock _l(mLock);
+    status_t status = CameraMetadata::getTagFromName(name, mVTags.get(), tag);
+    return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
+}
+
 bool
 ACameraMetadata::isLogicalMultiCamera(size_t* count, const char*const** physicalCameraIds) const {
     if (mType != ACM_CHARACTERISTICS) {
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 084a60b..e89e620 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -27,9 +27,17 @@
 
 #ifdef __ANDROID_VNDK__
 #include <CameraMetadata.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTag.h>
+#include <aidl/android/frameworks/cameraservice/common/VendorTagSection.h>
+#include <aidl/android/frameworks/cameraservice/common/ProviderIdAndVendorTagSections.h>
+#include <VendorTagDescriptor.h>
 using CameraMetadata = android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using ::aidl::android::frameworks::cameraservice::common::ProviderIdAndVendorTagSections;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;
+using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
 #else
 #include <camera/CameraMetadata.h>
+#include <camera/VendorTagDescriptor.h>
 #endif
 
 #include <camera/NdkCameraMetadata.h>
@@ -73,6 +81,8 @@
 
     camera_status_t getTags(/*out*/int32_t* numTags,
                             /*out*/const uint32_t** tags) const;
+    camera_status_t getTagFromName(const char* name, uint32_t* tag) const;
 
     const CameraMetadata& getInternalData() const;
     bool isLogicalMultiCamera(size_t* count, const char* const** physicalCameraIds) const;
@@ -134,6 +144,7 @@
 
     std::vector<const char*> mStaticPhysicalCameraIds;
     std::vector<String8> mStaticPhysicalCameraIdValues;
+    sp<VendorTagDescriptor> mVTags = nullptr;
 };
 
 #endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/impl/ACaptureRequest.h b/camera/ndk/impl/ACaptureRequest.h
index 2ffcafe..118c2a5 100644
--- a/camera/ndk/impl/ACaptureRequest.h
+++ b/camera/ndk/impl/ACaptureRequest.h
@@ -22,11 +22,8 @@
 
 using namespace android;
 
-#ifdef __ANDROID_VNDK__
-#include "ndk_vendor/impl/ACaptureRequestVendor.h"
-#else
 struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(ACameraWindowType* window) : mWindow(window) {};
+    explicit ACameraOutputTarget(ANativeWindow* window) : mWindow(window) {};
 
     bool operator == (const ACameraOutputTarget& other) const {
         return mWindow == other.mWindow;
@@ -41,9 +38,8 @@
         return mWindow > other.mWindow;
     }
 
-    ACameraWindowType* mWindow;
+    ANativeWindow* mWindow;
 };
-#endif
 
 struct ACameraOutputTargets {
     std::set<ACameraOutputTarget> mOutputs;
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 099c5c5..cf6b970 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -124,7 +124,7 @@
  */
 typedef void (*ACameraCaptureSession_prepareCallback)(
         void *context,
-        ACameraWindowType *window,
+        ANativeWindow *window,
         ACameraCaptureSession *session);
 
 /// Enum for describing error reason in {@link ACameraCaptureFailure}
@@ -276,7 +276,7 @@
  */
 typedef void (*ACameraCaptureSession_captureCallback_bufferLost)(
         void* context, ACameraCaptureSession* session,
-        ACaptureRequest* request, ACameraWindowType* window, int64_t frameNumber);
+        ACaptureRequest* request, ANativeWindow* window, int64_t frameNumber);
 
 /**
  * ACaptureCaptureSession_captureCallbacks structure used in
@@ -1088,7 +1088,7 @@
  * and no pre-allocation is done.</p>
  *
  * @param session the {@link ACameraCaptureSession} that needs to prepare output buffers.
- * @param window the {@link ACameraWindowType} for which the output buffers need to be prepared.
+ * @param window the {@link ANativeWindow} for which the output buffers need to be prepared.
  *
  * @return <ul><li>
  *             {@link ACAMERA_OK} if the method succeeds</li>
@@ -1102,7 +1102,7 @@
  */
 camera_status_t ACameraCaptureSession_prepareWindow(
     ACameraCaptureSession* session,
-    ACameraWindowType *window) __INTRODUCED_IN(34);
+    ANativeWindow *window) __INTRODUCED_IN(34);
 __END_DECLS
 
 #endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
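Illustration (not part of the patch): a minimal sketch of the ANativeWindow-based prepare path declared above. The session and reader are assumed to be configured elsewhere, and registration of the prepared callback is omitted.

    // Sketch only: assumes a configured ACameraCaptureSession and an AImageReader output.
    #include <camera/NdkCameraCaptureSession.h>
    #include <media/NdkImageReader.h>

    static void onWindowPrepared(void* /*context*/, ANativeWindow* window,
                                 ACameraCaptureSession* /*session*/) {
        // Buffers for `window` have been pre-allocated; requests targeting it can be submitted.
        (void) window;
    }

    static camera_status_t prepareReaderOutput(ACameraCaptureSession* session,
                                               AImageReader* reader) {
        ANativeWindow* window = nullptr;  // owned by the reader; not released here
        if (AImageReader_getWindow(reader, &window) != AMEDIA_OK || window == nullptr) {
            return ACAMERA_ERROR_UNKNOWN;
        }
        return ACameraCaptureSession_prepareWindow(session, window);
    }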
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index de10eb3..fbd0ee1 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -364,7 +364,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionOutput_create(
-        ACameraWindowType* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(24);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(24);
 
 /**
  * Free a ACaptureSessionOutput object.
@@ -705,7 +705,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionSharedOutput_create(
-        ACameraWindowType* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(28);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(28);
 
 /**
  * Add a native window to shared ACaptureSessionOutput.
@@ -723,7 +723,7 @@
  *             ACaptureSessionOutput.</li></ul>
  */
 camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output,
-        ACameraWindowType *anw) __INTRODUCED_IN(28);
+        ANativeWindow *anw) __INTRODUCED_IN(28);
 
 /**
  * Remove a native window from shared ACaptureSessionOutput.
@@ -739,7 +739,7 @@
  *             ACaptureSessionOutput.</li></ul>
  */
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
-        ACameraWindowType* anw) __INTRODUCED_IN(28);
+        ANativeWindow* anw) __INTRODUCED_IN(28);
 
 /**
  * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
@@ -797,7 +797,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionPhysicalOutput_create(
-        ACameraWindowType* anw, const char* physicalId,
+        ANativeWindow* anw, const char* physicalId,
         /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(29);
 
 /**
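Illustration (not part of the patch): a hedged sketch of session setup with the ANativeWindow-based output signatures above; `device`, `window`, and `callbacks` are assumed to exist, and cleanup of the output objects is omitted.

    #include <camera/NdkCameraDevice.h>

    // Sketch only: creates a single-output capture session from an ANativeWindow.
    static camera_status_t createSession(ACameraDevice* device, ANativeWindow* window,
                                         const ACameraCaptureSession_stateCallbacks* callbacks,
                                         ACameraCaptureSession** outSession) {
        ACaptureSessionOutputContainer* outputs = nullptr;
        ACaptureSessionOutput* output = nullptr;
        camera_status_t ret = ACaptureSessionOutputContainer_create(&outputs);
        if (ret != ACAMERA_OK) return ret;
        ret = ACaptureSessionOutput_create(window, &output);  // takes ANativeWindow* directly
        if (ret == ACAMERA_OK) ret = ACaptureSessionOutputContainer_add(outputs, output);
        if (ret == ACAMERA_OK) {
            ret = ACameraDevice_createCaptureSession(device, outputs, callbacks, outSession);
        }
        return ret;
    }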
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index cf29736..237d07b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -221,6 +221,24 @@
         /*out*/int32_t* numEntries, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
 
 /**
+ * Look up, by name, the tag ID value of a device-specific custom tag that is usable
+ * only on the particular device. The name and type of the tag need to be discovered
+ * from some other source, such as the manufacturer. The ID value is stable for the
+ * lifetime of the application, but should be queried again after the process restarts.
+ * This method can also look up public tags that exist in the Java API by name, but for
+ * those it is simpler and faster to use the tag constants already defined in the NDK.
+ *
+ * @param metadata The {@link ACameraMetadata} to query the tag value from.
+ * @param name The name of the tag being queried.
+ * @param tag The output tag value filled in by this method.
+ *
+ * @return ACAMERA_OK only if the function call was successful.
+ */
+camera_status_t ACameraMetadata_getTagFromName(
+        const ACameraMetadata* metadata, const char* name,
+        uint32_t* tag) __INTRODUCED_IN(35);
+
+/**
  * Create a copy of input {@link ACameraMetadata}.
  *
  * <p>The returned ACameraMetadata must be freed by the application by {@link ACameraMetadata_free}
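Illustration (not part of the patch): a hedged usage sketch of ACameraMetadata_getTagFromName; the vendor tag name below is hypothetical, and the characteristics are assumed to come from ACameraManager_getCameraCharacteristics.

    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>

    // Sketch only: `mgr` and `cameraId` are assumed to exist; the tag name is made up.
    static camera_status_t readVendorTag(ACameraManager* mgr, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        camera_status_t ret = ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars);
        if (ret != ACAMERA_OK) return ret;

        uint32_t tag = 0;
        ret = ACameraMetadata_getTagFromName(chars, "com.example.vendor.someTag", &tag);
        if (ret == ACAMERA_OK) {
            ACameraMetadata_const_entry entry;
            // Fails with ACAMERA_ERROR_METADATA_NOT_FOUND if the tag has no entry.
            ret = ACameraMetadata_getConstEntry(chars, tag, &entry);
        }
        ACameraMetadata_free(chars);
        return ret;
    }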
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 9f9860b..c97059d 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -91,6 +91,7 @@
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
     ACAMERA_JPEGR,
+    ACAMERA_EFV,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -138,6 +139,7 @@
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
     ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
+    ACAMERA_EFV_START              = ACAMERA_EFV               << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -2701,7 +2703,9 @@
      * upright.</p>
      * <p>Camera devices may either encode this value into the JPEG EXIF header, or
      * rotate the image data to match this orientation. When the image data is rotated,
-     * the thumbnail data will also be rotated.</p>
+     * the thumbnail data will also be rotated. Additionally, in the case where the image data
+     * is rotated, <a href="https://developer.android.com/reference/android/media/Image.html#getWidth">Image#getWidth</a> and <a href="https://developer.android.com/reference/android/media/Image.html#getHeight">Image#getHeight</a>
+     * will not be updated to reflect the height and width of the rotated image.</p>
      * <p>Note that this orientation is relative to the orientation of the camera sensor, given
      * by ACAMERA_SENSOR_ORIENTATION.</p>
      * <p>To translate from the device orientation given by the Android sensor APIs for camera
@@ -4720,18 +4724,21 @@
      * </ul>
      * <p>should be interpreted in the effective after raw crop field-of-view coordinate system.
      * In this coordinate system,
-     * {preCorrectionActiveArraySize.left, preCorrectionActiveArraySize.top} corresponds to the
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.left,
+ *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.top} corresponds to
      * the top left corner of the cropped RAW frame and
-     * {preCorrectionActiveArraySize.right, preCorrectionActiveArraySize.bottom} corresponds to
+     * {ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.right,
+     *  ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.bottom} corresponds to
      * the bottom right corner. Client applications must use the values of the keys
      * in the CaptureResult metadata if present.</p>
-     * <p>Crop regions (android.scaler.CropRegion), AE/AWB/AF regions and face coordinates still
+     * <p>Crop regions (ACAMERA_SCALER_CROP_REGION), AE/AWB/AF regions, and face coordinates still
      * use the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate system as usual.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
      * @see ACAMERA_LENS_POSE_ROTATION
      * @see ACAMERA_LENS_POSE_TRANSLATION
+     * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_HOT_PIXEL_MAP
@@ -11539,6 +11546,7 @@
 
 
 
+
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
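Illustration (not part of the patch): the section-start values above pack the section index into the upper 16 bits of the tag ID, so the new EFV section starts immediately after JPEGR.

    #include <camera/NdkCameraMetadataTags.h>
    #include <cstdint>

    // A tag ID is (section << 16) + per-section offset; sketch of the resulting layout.
    static_assert(static_cast<uint32_t>(ACAMERA_EFV_START) ==
                          (static_cast<uint32_t>(ACAMERA_EFV) << 16),
                  "section start is the section index shifted into the upper 16 bits");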
diff --git a/camera/ndk/include/camera/NdkCameraWindowType.h b/camera/ndk/include/camera/NdkCameraWindowType.h
index 0838fba..2217528 100644
--- a/camera/ndk/include/camera/NdkCameraWindowType.h
+++ b/camera/ndk/include/camera/NdkCameraWindowType.h
@@ -41,14 +41,11 @@
  * camera2 NDK. This enables us to share the api definition headers and avoid
  * code duplication (since the VNDK variant doesn't use ANativeWindow unlike the
  * NDK variant).
+ * @deprecated No longer needed. Both NDK and VNDK use ANativeWindow now.
+ *             Use ANativeWindow directly.
  */
-#ifdef __ANDROID_VNDK__
-#include <cutils/native_handle.h>
-typedef const native_handle_t ACameraWindowType;
-#else
 #include <android/native_window.h>
 typedef ANativeWindow ACameraWindowType;
-#endif
 
 /** @} */
 
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index dc18544..5ccb510 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -99,7 +99,7 @@
  *
  * @see ACaptureRequest_addTarget
  */
-camera_status_t ACameraOutputTarget_create(ACameraWindowType* window,
+camera_status_t ACameraOutputTarget_create(ANativeWindow* window,
         ACameraOutputTarget** output) __INTRODUCED_IN(24);
 
 /**
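Illustration (not part of the patch): a hedged sketch of building a request against an ANativeWindow target with the signature above; `device` and `window` (e.g. from AImageReader_getWindow) are assumed to exist.

    #include <camera/NdkCameraDevice.h>
    #include <camera/NdkCaptureRequest.h>

    // Sketch only: the caller later frees the target with ACameraOutputTarget_free and the
    // request with ACaptureRequest_free once they are no longer needed.
    static camera_status_t buildPreviewRequest(ACameraDevice* device, ANativeWindow* window,
                                               ACaptureRequest** outRequest,
                                               ACameraOutputTarget** outTarget) {
        camera_status_t ret =
                ACameraDevice_createCaptureRequest(device, TEMPLATE_PREVIEW, outRequest);
        if (ret != ACAMERA_OK) return ret;

        ret = ACameraOutputTarget_create(window, outTarget);  // takes ANativeWindow* directly
        if (ret != ACAMERA_OK) return ret;

        return ACaptureRequest_addTarget(*outRequest, *outTarget);
    }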
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 4c54658..7d7868b 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -35,6 +35,7 @@
     ACameraMetadata_copy;
     ACameraMetadata_free;
     ACameraMetadata_getAllTags;
+    ACameraMetadata_getTagFromName; # introduced=35
     ACameraMetadata_getConstEntry;
     ACameraMetadata_isLogicalMultiCamera; # introduced=29
     ACameraMetadata_fromCameraMetadata; # introduced=30
diff --git a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h b/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
deleted file mode 100644
index 45098c3..0000000
--- a/camera/ndk/ndk_vendor/impl/ACameraCaptureSessionVendor.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "utils.h"
-
-#include <android/binder_auto_utils.h>
-#include <string>
-#include <set>
-
-using ::android::acam::utils::native_handle_ptr_wrapper;
-
-struct ACaptureSessionOutput {
-    explicit ACaptureSessionOutput(const native_handle_t* window, bool isShared = false,
-            const char* physicalCameraId = "") :
-            mWindow(window), mIsShared(isShared), mPhysicalCameraId(physicalCameraId) {};
-
-    bool operator == (const ACaptureSessionOutput& other) const {
-        return (mWindow == other.mWindow);
-    }
-
-    bool operator != (const ACaptureSessionOutput& other) const {
-        return mWindow != other.mWindow;
-    }
-
-    bool operator < (const ACaptureSessionOutput& other) const {
-        return mWindow < other.mWindow;
-    }
-
-    bool operator > (const ACaptureSessionOutput& other) const {
-        return mWindow > other.mWindow;
-    }
-
-    inline bool isWindowEqual(ACameraWindowType* window) const {
-        return mWindow == native_handle_ptr_wrapper(window);
-    }
-
-    // returns true if the window was successfully added, false otherwise.
-    inline bool addSharedWindow(ACameraWindowType* window) {
-        auto ret = mSharedWindows.insert(window);
-        return ret.second;
-    }
-
-    // returns the number of elements removed.
-    inline size_t removeSharedWindow(ACameraWindowType* window) {
-        return mSharedWindows.erase(window);
-    }
-
-    native_handle_ptr_wrapper mWindow;
-    std::set<native_handle_ptr_wrapper> mSharedWindows;
-    bool           mIsShared;
-    int            mRotation = CAMERA3_STREAM_ROTATION_0;
-    std::string mPhysicalCameraId;
-};
-
-
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 87102e4..3325da6 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -26,7 +26,7 @@
 #include <aidl/android/frameworks/cameraservice/device/CameraMetadata.h>
 #include <aidl/android/frameworks/cameraservice/device/OutputConfiguration.h>
 #include <aidl/android/frameworks/cameraservice/device/SessionConfiguration.h>
-#include <aidlcommonsupport/NativeHandle.h>
+#include <android/native_window_aidl.h>
 #include <inttypes.h>
 #include <map>
 #include <utility>
@@ -59,6 +59,7 @@
 using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
 using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
 using ::aidl::android::frameworks::cameraservice::device::SessionConfiguration;
+using ::aidl::android::view::Surface;
 using ::ndk::ScopedAStatus;
 
 // Static member definitions
@@ -231,8 +232,9 @@
         OutputConfiguration& outputStream = sessionConfig.outputStreams[index];
         outputStream.rotation = utils::convertToAidl(output.mRotation);
         outputStream.windowGroupId = -1;
-        outputStream.windowHandles.resize(output.mSharedWindows.size() + 1);
-        outputStream.windowHandles[0] = std::move(dupToAidl(output.mWindow));
+        auto& surfaces = outputStream.surfaces;
+        surfaces.reserve(output.mSharedWindows.size() + 1);
+        surfaces.emplace_back(output.mWindow);
         outputStream.physicalCameraId = output.mPhysicalCameraId;
         index++;
     }
@@ -298,12 +300,12 @@
 
     OutputConfiguration outConfig;
     outConfig.rotation = utils::convertToAidl(output->mRotation);
-    outConfig.windowHandles.resize(output->mSharedWindows.size() + 1);
-    outConfig.windowHandles[0] = std::move(dupToAidl(output->mWindow));
+    auto& surfaces = outConfig.surfaces;
+    surfaces.reserve(output->mSharedWindows.size() + 1);
+    surfaces.emplace_back(output->mWindow);
     outConfig.physicalCameraId = output->mPhysicalCameraId;
-    int i = 1;
     for (auto& anw : output->mSharedWindows) {
-        outConfig.windowHandles[i++] = std::move(dupToAidl(anw));
+        surfaces.emplace_back(anw);
     }
 
     auto remoteRet = mRemote->updateOutputConfiguration(streamId,
@@ -340,7 +342,7 @@
     return ACAMERA_OK;
 }
 
-camera_status_t CameraDevice::prepareLocked(ACameraWindowType *window) {
+camera_status_t CameraDevice::prepareLocked(ANativeWindow *window) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
         return ret;
@@ -387,18 +389,19 @@
     std::vector<int32_t> requestSurfaceIdxList;
 
     for (auto& outputTarget : request->targets->mOutputs) {
-        native_handle_ptr_wrapper anw = outputTarget.mWindow;
+        ANativeWindow *anw = outputTarget.mWindow;
         bool found = false;
         req->mSurfaceList.push_back(anw);
         // lookup stream/surface ID
         for (const auto& kvPair : mConfiguredOutputs) {
             int streamId = kvPair.first;
             const OutputConfiguration& outConfig = kvPair.second.second;
-            const auto& windowHandles = outConfig.windowHandles;
-            for (int surfaceId = 0; surfaceId < (int) windowHandles.size(); surfaceId++) {
+            const auto& surfaces = outConfig.surfaces;
+            for (int surfaceId = 0; surfaceId < (int) surfaces.size(); surfaceId++) {
                 // If two window handles point to the same native window,
                 // they have the same surfaces.
-                if (utils::isWindowNativeHandleEqual(anw, windowHandles[surfaceId])) {
+                auto& surface = surfaces[surfaceId];
+                if (anw == surface.get()) {
                     found = true;
                     requestStreamIdxList.push_back(streamId);
                     requestSurfaceIdxList.push_back(surfaceId);
@@ -410,7 +413,7 @@
             }
         }
         if (!found) {
-            ALOGE("Unconfigured output target %p in capture request!", anw.mWindow);
+            ALOGE("Unconfigured output target %p in capture request!", anw);
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
@@ -470,7 +473,7 @@
     }
     pRequest->targets = new ACameraOutputTargets();
     for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
-        native_handle_ptr_wrapper anw = req->mSurfaceList[i];
+        ANativeWindow *anw = req->mSurfaceList[i];
         ACameraOutputTarget outputTarget(anw);
         pRequest->targets->mOutputs.insert(std::move(outputTarget));
     }
@@ -637,20 +640,21 @@
         return ret;
     }
 
-    std::map<native_handle_ptr_wrapper, OutputConfiguration> handleToConfig;
+    std::map<ANativeWindow *, OutputConfiguration> windowToConfig;
     for (const auto& outConfig : outputs->mOutputs) {
-        native_handle_ptr_wrapper anw = outConfig.mWindow;
+        ANativeWindow *anw = outConfig.mWindow;
         OutputConfiguration outConfigInsert;
         outConfigInsert.rotation = utils::convertToAidl(outConfig.mRotation);
         outConfigInsert.windowGroupId = -1;
-        outConfigInsert.windowHandles.resize(outConfig.mSharedWindows.size() + 1);
-        outConfigInsert.windowHandles[0] = std::move(dupToAidl(anw));
+        auto& surfaces = outConfigInsert.surfaces;
+        surfaces.reserve(outConfig.mSharedWindows.size() + 1);
+        surfaces.emplace_back(anw);
         outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
-        handleToConfig.insert({anw, std::move(outConfigInsert)});
+        windowToConfig.insert({anw, std::move(outConfigInsert)});
     }
 
-    std::set<native_handle_ptr_wrapper> addSet;
-    for (auto& kvPair : handleToConfig) {
+    std::set<ANativeWindow *> addSet;
+    for (auto& kvPair : windowToConfig) {
         addSet.insert(kvPair.first);
     }
 
@@ -663,8 +667,8 @@
         auto& anw = outputPair.first;
         auto& configuredOutput = outputPair.second;
 
-        auto itr = handleToConfig.find(anw);
-        if (itr != handleToConfig.end() && (itr->second) == configuredOutput) {
+        auto itr = windowToConfig.find(anw);
+        if (itr != windowToConfig.end() && (itr->second) == configuredOutput) {
             deleteList.push_back(streamId);
         } else {
             addSet.erase(anw);
@@ -714,13 +718,13 @@
     // add new streams
     for (const auto &anw : addSet) {
         int32_t streamId;
-        auto itr = handleToConfig.find(anw);
+        auto itr = windowToConfig.find(anw);
         remoteRet = mRemote->createStream(itr->second, &streamId);
         CHECK_TRANSACTION_AND_RET(remoteRet, "createStream()")
         mConfiguredOutputs.insert(std::make_pair(streamId,
                                                  std::make_pair(anw,
                                                                 std::move(itr->second))));
-        handleToConfig.erase(itr);
+        windowToConfig.erase(itr);
     }
 
     AidlCameraMetadata aidlParams;
@@ -867,9 +871,9 @@
         // Get the surfaces corresponding to the error stream id, go through
         // them and try to match the surfaces in the corresponding
         // CaptureRequest.
-        const auto& errorWindowHandles =
-                outputPairIt->second.second.windowHandles;
-        for (const auto& errorWindowHandle : errorWindowHandles) {
+        const auto& errorSurfaces =
+                outputPairIt->second.second.surfaces;
+        for (const auto& errorSurface : errorSurfaces) {
             for (const auto &requestStreamAndWindowId :
                         request->mCaptureRequest.streamAndWindowIds) {
                 // Go through the surfaces in the capture request and see which
@@ -884,12 +888,11 @@
                     return;
                 }
 
-                const auto &requestWindowHandles =
-                        requestSurfacePairIt->second.second.windowHandles;
+                const auto &requestSurfaces = requestSurfacePairIt->second.second.surfaces;
+                auto& requestSurface = requestSurfaces[requestWindowId];
 
-                if (requestWindowHandles[requestWindowId] == errorWindowHandle) {
-                    const native_handle_t* anw = makeFromAidl(
-                            requestWindowHandles[requestWindowId]);
+                if (requestSurface == errorSurface) {
+                    const ANativeWindow *anw = requestSurface.get();
                     ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
                             getId(), anw, frameNumber);
 
@@ -1085,7 +1088,7 @@
                     if (onWindowPrepared == nullptr) {
                         return;
                     }
-                    native_handle_t* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
                         ALOGE("%s: Cannot find ANativeWindow: %d!", __FUNCTION__, __LINE__);
@@ -1342,10 +1345,10 @@
                         return;
                     }
 
-                    native_handle_t* anw;
+                    ANativeWindow* anw;
                     found = msg->findPointer(kAnwKey, (void**) &anw);
                     if (!found) {
-                        ALOGE("%s: Cannot find native_handle_t!", __FUNCTION__);
+                        ALOGE("%s: Cannot find ANativeWindow!", __FUNCTION__);
                         return;
                     }
 
@@ -1359,7 +1362,6 @@
                     ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
                     (*onBufferLost)(context, session.get(), request, anw, frameNumber);
                     freeACaptureRequest(request);
-                    native_handle_delete(anw); // clean up anw as it was copied from AIDL
                     break;
                 }
             }
@@ -1842,7 +1844,7 @@
         return ScopedAStatus::ok();
     }
     // We've found the window corresponding to the surface id.
-    const native_handle_t *anw = it->second.first.mWindow;
+    const ANativeWindow *anw = it->second.first;
     sp<AMessage> msg = new AMessage(kWhatPreparedCb, dev->mHandler);
     msg->setPointer(kContextKey, session->mPreparedCb.context);
     msg->setPointer(kAnwKey, (void *)anw);
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 6e0c772..b771d47 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -66,7 +66,6 @@
 using ::aidl::android::frameworks::cameraservice::service::CameraStatusAndId;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
 using ::android::AidlMessageQueue;
-using ::android::acam::utils::native_handle_ptr_wrapper;
 
 
 using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
@@ -197,7 +196,7 @@
 
     camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
 
-    camera_status_t prepareLocked(ACameraWindowType *window);
+    camera_status_t prepareLocked(ANativeWindow *window);
 
     // Since this writes to ICameraDeviceUser's fmq, clients must take care that:
     //   a) This function is called serially.
@@ -236,7 +235,7 @@
 
     // stream id -> pair of (ACameraWindowType* from application, OutputConfiguration used for
     // camera service)
-    std::map<int, std::pair<native_handle_ptr_wrapper, OutputConfiguration>> mConfiguredOutputs;
+    std::map<int, std::pair<ANativeWindow *, OutputConfiguration>> mConfiguredOutputs;
 
     // TODO: maybe a bool will suffice for synchronous implementation?
     std::atomic_bool mClosing;
diff --git a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h b/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
deleted file mode 100644
index fcb7e34..0000000
--- a/camera/ndk/ndk_vendor/impl/ACaptureRequestVendor.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "utils.h"
-
-using ::android::acam::utils::native_handle_ptr_wrapper;
-struct ACameraOutputTarget {
-    explicit ACameraOutputTarget(const native_handle_t* window) : mWindow(window) {};
-
-    bool operator == (const ACameraOutputTarget& other) const {
-        return mWindow == other.mWindow;
-    }
-    bool operator != (const ACameraOutputTarget& other) const {
-        return mWindow != other.mWindow;
-    }
-    bool operator < (const ACameraOutputTarget& other) const {
-        return mWindow < other.mWindow;
-    }
-    bool operator > (const ACameraOutputTarget& other) const {
-        return mWindow > other.mWindow;
-    }
-
-    native_handle_ptr_wrapper mWindow;
-};
diff --git a/camera/ndk/ndk_vendor/impl/utils.cpp b/camera/ndk/ndk_vendor/impl/utils.cpp
index 73a527b..3971c73 100644
--- a/camera/ndk/ndk_vendor/impl/utils.cpp
+++ b/camera/ndk/ndk_vendor/impl/utils.cpp
@@ -18,7 +18,6 @@
 
 #include "utils.h"
 
-#include <aidlcommonsupport/NativeHandle.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -138,51 +137,6 @@
     return ret;
 }
 
-bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2) {
-    if (nh1->numFds !=0 || nh2->numFds !=0) {
-        ALOGE("Invalid window native handles being compared");
-        return false;
-    }
-    if (nh1->version != nh2->version || nh1->numFds != nh2->numFds ||
-        nh1->numInts != nh2->numInts) {
-        return false;
-    }
-    for (int i = 0; i < nh1->numInts; i++) {
-        if(nh1->data[i] != nh2->data[i]) {
-            return false;
-        }
-    }
-    return true;
-}
-
-bool isWindowNativeHandleEqual(const native_handle_t *nh1,
-                               const aidl::android::hardware::common::NativeHandle& nh2) {
-    native_handle_t* tempNh = makeFromAidl(nh2);
-    bool equal = isWindowNativeHandleEqual(nh1, tempNh);
-    native_handle_delete(tempNh);
-    return equal;
-}
-
-bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2) {
-    if (isWindowNativeHandleEqual(nh1, nh2)) {
-        return false;
-    }
-    if (nh1->numInts != nh2->numInts) {
-        return nh1->numInts < nh2->numInts;
-    }
-
-    for (int i = 0; i < nh1->numInts; i++) {
-        if (nh1->data[i] != nh2->data[i]) {
-            return nh1->data[i] < nh2->data[i];
-        }
-    }
-    return false;
-}
-
-bool isWindowNativeHandleGreaterThan(const native_handle_t *nh1, const native_handle_t *nh2) {
-    return !isWindowNativeHandleLessThan(nh1, nh2) && !isWindowNativeHandleEqual(nh1, nh2);
-}
-
 } // namespace utils
 } // namespace acam
 } // namespace android
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 7ad74ad..d0dd2fc 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -38,53 +38,14 @@
 using ::aidl::android::frameworks::cameraservice::device::OutputConfiguration;
 using ::aidl::android::frameworks::cameraservice::device::PhysicalCameraSettings;
 using ::aidl::android::frameworks::cameraservice::device::TemplateId;
-using ::aidl::android::hardware::common::NativeHandle;
 using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
 using AidlCameraMetadata = ::aidl::android::frameworks::cameraservice::device::CameraMetadata;
 using AidlCaptureRequest = ::aidl::android::frameworks::cameraservice::device::CaptureRequest;
 
-bool isWindowNativeHandleEqual(const native_handle_t *nh1, const native_handle_t *nh2);
-
-bool isWindowNativeHandleEqual(const native_handle_t* nh1, const NativeHandle& nh2);
-
-bool isWindowNativeHandleLessThan(const native_handle_t *nh1, const native_handle_t *nh2);
-
-// Convenience wrapper over isWindowNativeHandleLessThan and isWindowNativeHandleEqual
-bool isWindowNativeHandleGreaterThan(const native_handle_t *nh1, const native_handle_t *nh2);
-
-// Utility class so the native_handle_t can be compared with  its contents instead
-// of just raw pointer comparisons.
-struct native_handle_ptr_wrapper {
-    const native_handle_t *mWindow = nullptr;
-
-    native_handle_ptr_wrapper(const native_handle_t *nh) : mWindow(nh) { }
-
-    native_handle_ptr_wrapper() = default;
-
-    operator const native_handle_t *() const { return mWindow; }
-
-    bool operator ==(const native_handle_ptr_wrapper other) const {
-        return isWindowNativeHandleEqual(mWindow, other.mWindow);
-    }
-
-    bool operator != (const native_handle_ptr_wrapper& other) const {
-        return !isWindowNativeHandleEqual(mWindow, other.mWindow);
-    }
-
-    bool operator < (const native_handle_ptr_wrapper& other) const {
-        return isWindowNativeHandleLessThan(mWindow, other.mWindow);
-    }
-
-    bool operator > (const native_handle_ptr_wrapper& other) const {
-        return !isWindowNativeHandleGreaterThan(mWindow, other.mWindow);
-    }
-
-};
-
 // Utility class so that CaptureRequest can be stored by sp<>
 struct CaptureRequest: public RefBase {
   AidlCaptureRequest mCaptureRequest;
-  std::vector<native_handle_ptr_wrapper> mSurfaceList;
+  std::vector<ANativeWindow *> mSurfaceList;
   // Physical camera settings metadata is stored here, as the capture request
   // might not contain it. That's since, fmq might have consumed it.
   std::vector<PhysicalCameraSettings> mPhysicalCameraSettings;
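Illustration (not part of the patch): with the native_handle comparison helpers removed, window identity in these containers reduces to plain ANativeWindow pointer comparison, whereas the old wrappers compared the contents of the underlying native_handle_t.

    #include <android/native_window.h>
    #include <set>

    // Sketch only: two outputs are considered the same iff they wrap the same ANativeWindow*.
    static bool isConfigured(const std::set<ANativeWindow*>& configured, ANativeWindow* anw) {
        return configured.find(anw) != configured.end();
    }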
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 74c6cad..0259359 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,8 +31,6 @@
 #include <stdio.h>
 
 #include <android/log.h>
-#include <android/hidl/manager/1.2/IServiceManager.h>
-#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
@@ -40,7 +38,6 @@
 #include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
-#include <cutils/native_handle.h>
 #include <VendorTagDescriptor.h>
 
 namespace {
@@ -53,9 +50,7 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
-using android::hidl::manager::V1_0::IServiceManager;
-using android::hidl::token::V1_0::ITokenManager;
-using ConfiguredWindows = std::set<const native_handle_t *>;
+using ConfiguredWindows = std::set<ANativeWindow*>;
 
 class CameraHelper {
    public:
@@ -65,11 +60,11 @@
 
     struct PhysicalImgReaderInfo {
         const char* physicalCameraId;
-        const native_handle_t* anw;
+        ANativeWindow* anw;
     };
 
     // Retaining the error code in case the caller needs to analyze it.
-    std::variant<int, ConfiguredWindows> initCamera(const native_handle_t* imgReaderAnw,
+    std::variant<int, ConfiguredWindows> initCamera(ANativeWindow* imgReaderAnw,
             const std::vector<PhysicalImgReaderInfo>& physicalImgReaders,
             bool usePhysicalSettings, bool prepareWindows = false) {
         ConfiguredWindows configuredWindows;
@@ -109,7 +104,7 @@
         }
         configuredWindows.insert(mImgReaderAnw);
         std::vector<const char*> idPointerList;
-        std::set<const native_handle_t*> physicalStreamMap;
+        std::set<ANativeWindow*> physicalStreamMap;
         for (auto& physicalStream : physicalImgReaders) {
             ACaptureSessionOutput* sessionOutput = nullptr;
             ret = ACaptureSessionPhysicalOutput_create(physicalStream.anw,
@@ -301,7 +296,7 @@
 
 
    private:
-    static void onPreparedCb(void* obj, ACameraWindowType *anw, ACameraCaptureSession *session) {
+    static void onPreparedCb(void* obj, ANativeWindow *anw, ACameraCaptureSession *session) {
         CameraHelper* thiz = reinterpret_cast<CameraHelper*>(obj);
         thiz->handlePrepared(anw, session);
     }
@@ -317,7 +312,7 @@
         return ret;
     }
 
-    void handlePrepared(ACameraWindowType *anw, ACameraCaptureSession *session) {
+    void handlePrepared(ANativeWindow *anw, ACameraCaptureSession *session) {
         // Reduce the pending prepared count of anw by 1. If count is  0, remove the key.
         std::lock_guard<std::mutex> lock(mMutex);
         if (session != mSession) {
@@ -334,7 +329,7 @@
             mPendingPreparedCbs.erase(anw);
         }
     }
-    void incPendingPrepared(ACameraWindowType *anw) {
+    void incPendingPrepared(ANativeWindow *anw) {
         std::lock_guard<std::mutex> lock(mMutex);
         if ((mPendingPreparedCbs.find(anw) == mPendingPreparedCbs.end())) {
             mPendingPreparedCbs[anw] = 1;
@@ -344,13 +339,13 @@
     }
 
     // ANW -> pending prepared callbacks
-    std::unordered_map<ACameraWindowType *, int> mPendingPreparedCbs;
+    std::unordered_map<ANativeWindow*, int> mPendingPreparedCbs;
     ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
     ACameraCaptureSession_stateCallbacks mSessionCb{ this, nullptr, nullptr, nullptr};
 
     ACameraCaptureSession_prepareCallback mPreparedCb = &onPreparedCb;
 
-    const native_handle_t* mImgReaderAnw = nullptr;  // not owned by us.
+    ANativeWindow* mImgReaderAnw = nullptr;  // not owned by us.
 
     // Camera device
     ACameraDevice* mDevice = nullptr;
@@ -484,7 +479,7 @@
     ~ImageReaderTestCase() {
         if (mImgReaderAnw) {
             AImageReader_delete(mImgReader);
-            // No need to call native_handle_t_release on imageReaderAnw
+            // No need to release mImgReaderAnw separately; it is owned by the AImageReader.
         }
     }
 
@@ -514,17 +509,18 @@
             return ret;
         }
 
-        ret = AImageReader_getWindowNativeHandle(mImgReader, &mImgReaderAnw);
+        ret = AImageReader_getWindow(mImgReader, &mImgReaderAnw);
         if (ret != AMEDIA_OK || mImgReaderAnw == nullptr) {
-            ALOGE("Failed to get native_handle_t from AImageReader, ret=%d, mImgReaderAnw=%p.", ret,
-                  mImgReaderAnw);
+            ALOGE("Failed to get ANativeWindow* from AImageReader, ret=%d, mImgReader=%p.", ret,
+                  mImgReader);
             return -1;
         }
 
         return 0;
     }
 
-    const native_handle_t* getNativeWindow() { return mImgReaderAnw; }
+    ANativeWindow* getNativeWindow() { return mImgReaderAnw; }
 
     int getAcquiredImageCount() {
         std::lock_guard<std::mutex> lock(mMutex);
@@ -657,7 +653,7 @@
     int mAcquiredImageCount{0};
 
     AImageReader* mImgReader = nullptr;
-    native_handle_t* mImgReaderAnw = nullptr;
+    ANativeWindow* mImgReaderAnw = nullptr;
 
     AImageReader_ImageListener mReaderAvailableCb{this, onImageAvailable};
     AImageReader_BufferRemovedListener mReaderDetachedCb{this, onBufferRemoved};
@@ -985,20 +981,12 @@
 
 
 
-TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
-    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
-    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
-        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
-    }
+TEST_F(AImageReaderVendorTest, CreateANativeWindow) {
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
-    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
-    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
-        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
-    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 65b8b41..9aaac6a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: [
         "frameworks_av_camera_license",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index bb963ab..e5f99be 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -43,6 +43,7 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 #include <gui/BufferItemConsumer.h>
@@ -77,29 +78,34 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId) override {
+    virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         Mutex::Autolock l(mLock);
         mCameraStatuses[cameraId] = status;
         mCondition.broadcast();
         return binder::Status::ok();
-    };
+    }
 
-    virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) override {
+    virtual binder::Status onPhysicalCameraStatusChanged([[maybe_unused]] int32_t /*status*/,
+            [[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*physicalCameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t status,
-            const std::string& cameraId) override {
+            const std::string& cameraId, [[maybe_unused]] int32_t /*deviceId*/) override {
         Mutex::Autolock l(mLock);
         mCameraTorchStatuses[cameraId] = status;
         mTorchCondition.broadcast();
         return binder::Status::ok();
-    };
+    }
 
-    virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) override {
+    virtual binder::Status onTorchStrengthLevelChanged(
+            [[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*torchStrength*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
     }
@@ -109,13 +115,15 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+    virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageName*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+    virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // No op
         return binder::Status::ok();
     }
@@ -133,7 +141,7 @@
             }
         }
         return true;
-    };
+    }
 
     bool waitForTorchState(int32_t status, int32_t cameraId) const {
         Mutex::Autolock l(mLock);
@@ -153,7 +161,7 @@
             foundStatus = (iter != mCameraTorchStatuses.end() && iter->second == status);
         }
         return true;
-    };
+    }
 
     int32_t getTorchStatus(int32_t cameraId) const {
         Mutex::Autolock l(mLock);
@@ -162,7 +170,7 @@
             return hardware::ICameraServiceListener::TORCH_STATUS_UNKNOWN;
         }
         return iter->second;
-    };
+    }
 
     int32_t getStatus(const std::string& cameraId) const {
         Mutex::Autolock l(mLock);
@@ -171,7 +179,7 @@
             return hardware::ICameraServiceListener::STATUS_UNKNOWN;
         }
         return iter->second;
-    };
+    }
 };
 
 // Callback implementation
@@ -230,7 +238,6 @@
         return binder::Status::ok();
     }
 
-
     virtual binder::Status onResultReceived(const CameraMetadata& metadata,
             const CaptureResultExtras& resultExtras,
             const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
@@ -296,7 +303,6 @@
         mStatusesHit.clear();
 
         return true;
-
     }
 
     void clearStatus() const {
@@ -307,7 +313,6 @@
     bool waitForIdle() const {
         return waitForStatus(IDLE);
     }
-
 };
 
 namespace {
@@ -324,7 +329,7 @@
         }
     };
     sp<DeathNotifier>         gDeathNotifier;
-}; // anonymous namespace
+} // anonymous namespace
 
 // Exercise basic binder calls for the camera service
 TEST(CameraServiceBinderTest, CheckBinderCameraService) {
@@ -342,7 +347,8 @@
     binder::Status res;
 
     int32_t numCameras = 0;
-    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+            /*devicePolicy*/0, &numCameras);
     EXPECT_TRUE(res.isOk()) << res;
     EXPECT_LE(0, numCameras);
 
@@ -354,7 +360,7 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, it.cameraId);
+        listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
@@ -372,7 +378,8 @@
         // Check metadata binder call
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -389,7 +396,7 @@
         res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
                 {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*out*/&device);
+                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -399,12 +406,12 @@
         if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
             // Check torch calls
             res = service->setTorchMode(cameraId,
-                    /*enabled*/true, callbacks);
+                    /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
             res = service->setTorchMode(cameraId,
-                    /*enabled*/false, callbacks);
+                    /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -433,7 +440,8 @@
             binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
                     {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                    /*overrideToPortrait*/false, /*out*/&device);
+                    /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+                    /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
         auto p = std::make_pair(callbacks, device);
@@ -466,10 +474,10 @@
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, it.cameraId);
+            serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-                &numCameras);
+                kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
     }
 
     virtual void TearDown() {
@@ -479,7 +487,6 @@
             closeDevice(p);
         }
     }
-
 };
 
 TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
@@ -647,8 +654,7 @@
 
         closeDevice(p);
     }
-
-};
+}
 
 TEST_F(CameraClientBinderTest, CheckBinderCaptureRequest) {
     sp<CaptureRequest> requestOriginal, requestParceled;
@@ -707,4 +713,4 @@
     EXPECT_TRUE(it->settings.exists(ANDROID_CONTROL_CAPTURE_INTENT));
     entry = it->settings.find(ANDROID_CONTROL_CAPTURE_INTENT);
     EXPECT_EQ(entry.data.u8[0], intent2);
-};
+}
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 1de7cb4..10f7f22 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -24,6 +24,7 @@
 #include <utils/Log.h>
 #include <camera/CameraMetadata.h>
 #include <camera/Camera.h>
+#include <camera/CameraUtils.h>
 #include <android/hardware/ICameraService.h>
 
 using namespace android;
@@ -31,7 +32,6 @@
 
 class CameraCharacteristicsPermission : public ::testing::Test {
 protected:
-
     CameraCharacteristicsPermission() : numCameras(0){}
     //Gtest interface
     void SetUp() override;
@@ -48,7 +48,8 @@
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            &numCameras);
     EXPECT_TRUE(rc.isOk());
 }
 
@@ -61,7 +62,6 @@
 // a camera permission.
 TEST_F(CameraCharacteristicsPermission, TestCameraPermission) {
     for (int32_t cameraId = 0; cameraId < numCameras; cameraId++) {
-
         std::string cameraIdStr = std::to_string(cameraId);
         bool isSupported = false;
         auto rc = mCameraService->supportsCameraApi(cameraIdStr,
@@ -75,7 +75,7 @@
         std::vector<int32_t> tagsNeedingPermission;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, &metadata);
+                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 3ae7659..56fcfa4 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -27,6 +27,7 @@
 #include <camera/CameraParameters.h>
 #include <camera/CameraMetadata.h>
 #include <camera/Camera.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <android/hardware/ICameraService.h>
 
@@ -84,7 +85,8 @@
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, &numCameras);
+            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            &numCameras);
     EXPECT_TRUE(rc.isOk());
 
     mComposerClient = new SurfaceComposerClient;
@@ -109,7 +111,6 @@
 
 void CameraZSLTests::dataCallback(int32_t msgType, const sp<IMemory>& /*data*/,
         camera_frame_metadata_t *) {
-
     switch (msgType) {
     case CAMERA_MSG_PREVIEW_FRAME: {
         Mutex::Autolock l(mPreviewLock);
@@ -127,7 +128,7 @@
     default:
         ALOGV("%s: msgType: %d", __FUNCTION__, msgType);
     }
-};
+}
 
 status_t CameraZSLTests::waitForPreviewStart() {
     status_t rc = NO_ERROR;
@@ -184,7 +185,7 @@
         CameraMetadata metadata;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                &metadata);
+                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -212,7 +213,8 @@
                 "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+                /*devicePolicy*/0, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
         CameraParameters params(cameraDevice->getParameters());
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index b74b7a1..bd97c39 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 package {
+    default_team: "trendy_team_camera_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_camera_license"
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
index 07efc20..8371905 100644
--- a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -16,14 +16,19 @@
 
 #include <CameraParameters.h>
 #include <CameraParameters2.h>
+#include <camera/StringUtils.h>
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <utils/String16.h>
 #include <camera/StringUtils.h>
 
+#include <functional>
+
 using namespace std;
 using namespace android;
 
+constexpr int8_t kMaxBytes = 20;
+
 string kValidFormats[] = {
         CameraParameters::PIXEL_FORMAT_YUV422SP,      CameraParameters::PIXEL_FORMAT_YUV420SP,
         CameraParameters::PIXEL_FORMAT_YUV422I,       CameraParameters::PIXEL_FORMAT_YUV420P,
@@ -34,26 +39,22 @@
 class CameraParametersFuzzer {
   public:
     void process(const uint8_t* data, size_t size);
-    ~CameraParametersFuzzer() {
-        delete mCameraParameters;
-        delete mCameraParameters2;
-    }
 
   private:
     void invokeCameraParameters();
     template <class type>
-    void initCameraParameters(type** obj);
+    void initCameraParameters(unique_ptr<type>& obj);
     template <class type>
-    void cameraParametersCommon(type* obj);
-    CameraParameters* mCameraParameters = nullptr;
-    CameraParameters2* mCameraParameters2 = nullptr;
+    void callCameraParametersAPIs(unique_ptr<type>& obj);
+    unique_ptr<CameraParameters> mCameraParameters;
+    unique_ptr<CameraParameters2> mCameraParameters2;
     FuzzedDataProvider* mFDP = nullptr;
 };
 
 template <class type>
-void CameraParametersFuzzer::initCameraParameters(type** obj) {
+void CameraParametersFuzzer::initCameraParameters(unique_ptr<type>& obj) {
     if (mFDP->ConsumeBool()) {
-        *obj = new type();
+        obj = make_unique<type>();
     } else {
         string params;
         if (mFDP->ConsumeBool()) {
@@ -61,94 +62,176 @@
             int32_t height = mFDP->ConsumeIntegral<int32_t>();
             int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
             int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
-            params = CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
+            params = mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                         : CameraParameters::KEY_SUPPORTED_VIDEO_SIZES;
             params += '=' + to_string(width) + 'x' + to_string(height) + ';';
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_PREVIEW_FPS_RANGE;
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_PREVIEW_FPS_RANGE;
                 params += '=' + to_string(minFps) + ',' + to_string(maxFps) + ';';
             }
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_SUPPORTED_PICTURE_SIZES;
                 params += '=' + to_string(width) + 'x' + to_string(height) + ';';
             }
             if (mFDP->ConsumeBool()) {
-                params += CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
-                params += '=' + mFDP->PickValueInArray(kValidFormats) + ';';
+                params += mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                              : CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS;
+                params += '=' +
+                          (mFDP->ConsumeBool() ? mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()
+                                               : mFDP->PickValueInArray(kValidFormats)) + ';';
             }
         } else {
-            params = mFDP->ConsumeRandomLengthString();
+            params = mFDP->ConsumeRandomLengthString(kMaxBytes);
         }
-        *obj = new type(toString8(params));
+        obj = make_unique<type>(toString8(params));
     }
 }
 
 template <class type>
-void CameraParametersFuzzer::cameraParametersCommon(type* obj) {
-    Vector<Size> supportedPreviewSizes;
-    obj->getSupportedPreviewSizes(supportedPreviewSizes);
-    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
-    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
-    obj->setPreviewSize(previewWidth, previewHeight);
-    obj->getPreviewSize(&previewWidth, &previewHeight);
-
+void CameraParametersFuzzer::callCameraParametersAPIs(unique_ptr<type>& obj) {
     Vector<Size> supportedVideoSizes;
-    obj->getSupportedVideoSizes(supportedVideoSizes);
-    if (supportedVideoSizes.size() != 0) {
-        int32_t videoWidth, videoHeight, preferredVideoWidth, preferredVideoHeight;
-        if (mFDP->ConsumeBool()) {
-            int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(0, supportedVideoSizes.size() - 1);
-            obj->setVideoSize(supportedVideoSizes[idx].width, supportedVideoSizes[idx].height);
-        } else {
-            videoWidth = mFDP->ConsumeIntegral<int32_t>();
-            videoHeight = mFDP->ConsumeIntegral<int32_t>();
-            obj->setVideoSize(videoWidth, videoHeight);
-        }
-        obj->getVideoSize(&videoWidth, &videoHeight);
-        obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth, &preferredVideoHeight);
-    }
-
-    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
-    obj->setPreviewFrameRate(fps);
-    obj->getPreviewFrameRate();
-    string previewFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                               : mFDP->ConsumeRandomLengthString();
-    obj->setPreviewFormat(previewFormat.c_str());
-
-    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
-    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
-    Vector<Size> supportedPictureSizes;
-    obj->setPictureSize(pictureWidth, pictureHeight);
-    obj->getPictureSize(&pictureWidth, &pictureHeight);
-    obj->getSupportedPictureSizes(supportedPictureSizes);
-    string pictureFormat = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                               : mFDP->ConsumeRandomLengthString();
-    obj->setPictureFormat(pictureFormat.c_str());
-    obj->getPictureFormat();
-
-    if (mFDP->ConsumeBool()) {
-        obj->dump();
-    } else {
-        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-        Vector<String16> args = {};
-        obj->dump(fd, args);
-        close(fd);
+    while (mFDP->remaining_bytes()) {
+        auto callCameraUtilsAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    Vector<Size> supportedPreviewSizes;
+                    obj->getSupportedPreviewSizes(supportedPreviewSizes);
+                },
+                [&]() {
+                    int32_t previewWidth = mFDP->ConsumeIntegral<int32_t>();
+                    int32_t previewHeight = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPreviewSize(previewWidth, previewHeight);
+                },
+                [&]() {
+                    int32_t previewWidth, previewHeight;
+                    obj->getPreviewSize(&previewWidth, &previewHeight);
+                },
+                [&]() { obj->getSupportedVideoSizes(supportedVideoSizes); },
+                [&]() {
+                    int32_t videoWidth, videoHeight;
+                    if (supportedVideoSizes.size()) {
+                        int32_t idx = mFDP->ConsumeIntegralInRange<int32_t>(
+                                0, supportedVideoSizes.size() - 1);
+                        videoWidth = mFDP->ConsumeBool() ? supportedVideoSizes[idx].width
+                                                         : mFDP->ConsumeIntegral<int32_t>();
+                        videoHeight = mFDP->ConsumeBool() ? supportedVideoSizes[idx].height
+                                                          : mFDP->ConsumeIntegral<int32_t>();
+                        obj->setVideoSize(videoWidth, videoHeight);
+                    }
+                },
+                [&]() {
+                    int32_t videoWidth, videoHeight;
+                    obj->getVideoSize(&videoWidth, &videoHeight);
+                },
+                [&]() {
+                    int32_t preferredVideoWidth, preferredVideoHeight;
+                    obj->getPreferredPreviewSizeForVideo(&preferredVideoWidth,
+                                                         &preferredVideoHeight);
+                },
+                [&]() {
+                    int32_t fps = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPreviewFrameRate(fps);
+                },
+                [&]() { obj->getPreviewFrameRate(); },
+                [&]() {
+                    string previewFormat = mFDP->ConsumeBool()
+                                                   ? mFDP->PickValueInArray(kValidFormats)
+                                                   : mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->setPreviewFormat(previewFormat.c_str());
+                },
+                [&]() {
+                    int32_t pictureWidth = mFDP->ConsumeIntegral<int32_t>();
+                    int32_t pictureHeight = mFDP->ConsumeIntegral<int32_t>();
+                    obj->setPictureSize(pictureWidth, pictureHeight);
+                },
+                [&]() {
+                    int32_t pictureWidth, pictureHeight;
+                    obj->getPictureSize(&pictureWidth, &pictureHeight);
+                },
+                [&]() {
+                    Vector<Size> supportedPictureSizes;
+                    obj->getSupportedPictureSizes(supportedPictureSizes);
+                },
+                [&]() {
+                    string pictureFormat = mFDP->ConsumeBool()
+                                                   ? mFDP->PickValueInArray(kValidFormats)
+                                                   : mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->setPictureFormat(pictureFormat.c_str());
+                },
+                [&]() { obj->getPictureFormat(); },
+                [&]() {
+                    if (mFDP->ConsumeBool()) {
+                        obj->dump();
+                    } else {
+                        int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                        Vector<String16> args = {};
+                        obj->dump(fd, args);
+                        close(fd);
+                    }
+                },
+                [&]() { obj->flatten(); },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    float value = mFDP->ConsumeFloatingPoint<float>();
+                    obj->setFloat(key.c_str(), value);
+                },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->getFloat(key.c_str());
+                },
+                [&]() { obj->getPreviewFormat(); },
+                [&]() {
+                    string key = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                    obj->remove(key.c_str());
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        string format = mFDP->ConsumeBool()
+                                                ? mFDP->ConsumeRandomLengthString(kMaxBytes)
+                                                : mFDP->PickValueInArray(kValidFormats);
+                        mCameraParameters->previewFormatToEnum(format.c_str());
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        mCameraParameters->isEmpty();
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters>) {
+                        Vector<int32_t> formats;
+                        mCameraParameters->getSupportedPreviewFormats(formats);
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters2>) {
+                        string key1 = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        string key2 = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t order;
+                        mCameraParameters2->compareSetOrder(key1.c_str(), key2.c_str(), &order);
+                    }
+                },
+                [&]() {
+                    if (std::is_same_v<type, CameraParameters2>) {
+                        int32_t minFps = mFDP->ConsumeIntegral<int32_t>();
+                        int32_t maxFps = mFDP->ConsumeIntegral<int32_t>();
+                        mCameraParameters2->setPreviewFpsRange(minFps, maxFps);
+                    }
+                },
+        });
+        callCameraUtilsAPIs();
     }
 }
 
 void CameraParametersFuzzer::invokeCameraParameters() {
-    initCameraParameters<CameraParameters>(&mCameraParameters);
-    cameraParametersCommon<CameraParameters>(mCameraParameters);
-    initCameraParameters<CameraParameters2>(&mCameraParameters2);
-    cameraParametersCommon<CameraParameters2>(mCameraParameters2);
-
-    int32_t minFPS, maxFPS;
-    mCameraParameters->getPreviewFpsRange(&minFPS, &maxFPS);
-    string format = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFormats)
-                                        : mFDP->ConsumeRandomLengthString();
-    mCameraParameters->previewFormatToEnum(format.c_str());
-    mCameraParameters->isEmpty();
-    Vector<int32_t> formats;
-    mCameraParameters->getSupportedPreviewFormats(formats);
+    if (mFDP->ConsumeBool()) {
+        initCameraParameters<CameraParameters>(mCameraParameters);
+        callCameraParametersAPIs(mCameraParameters);
+    } else {
+        initCameraParameters<CameraParameters2>(mCameraParameters2);
+        callCameraParametersAPIs(mCameraParameters2);
+    }
 }
 
 void CameraParametersFuzzer::process(const uint8_t* data, size_t size) {
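
The rewrite above replaces a fixed, linear call sequence with data-driven dispatch: each loop iteration asks FuzzedDataProvider to pick one lambda out of an array of API calls, so the order and mix of calls is decided by the fuzz input, and the loop ends when the input bytes run out. A self-contained sketch of the same pattern against a hypothetical Counter target (CameraParameters stands behind obj in the real code):

    #include <fuzzer/FuzzedDataProvider.h>
    #include <cstdint>
    #include <functional>

    struct Counter {                       // hypothetical target, stands in for CameraParameters
        void add(int32_t v) { total += v; }
        void reset() { total = 0; }
        int64_t total = 0;
    };

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        Counter target;
        while (fdp.remaining_bytes()) {
            auto callApi = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { target.add(fdp.ConsumeIntegral<int32_t>()); },
                    [&]() { target.reset(); },
            });
            callApi();                     // each iteration exercises one randomly chosen API
        }
        return 0;
    }
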
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
index 494ec1b..5ad9530 100644
--- a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -44,7 +44,7 @@
     }
 
     for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
-        string id = fdp.ConsumeRandomLengthString();
+        string id = fdp.ConsumeRandomLengthString(kMaxBytes);
         if (fdp.ConsumeBool()) {
             parcelCamCaptureReq.writeString16(toString16(id));
         }
@@ -120,7 +120,11 @@
         }
     }
 
-    invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    if (fdp.ConsumeBool()) {
+        invokeReadWriteParcelsp<CaptureRequest>(captureRequest);
+    } else {
+        invokeNewReadWriteParcelsp<CaptureRequest>(captureRequest, fdp);
+    }
     invokeReadWriteNullParcelsp<CaptureRequest>(captureRequest);
     parcelCamCaptureReq.setDataPosition(0);
     captureRequest->readFromParcel(&parcelCamCaptureReq);
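
Whichever helper variant the branch above selects, the cycle these parcel fuzzers exercise is the Parcelable round trip that the last two lines perform explicitly: serialize, rewind, deserialize. A minimal sketch with the generic android::Parcelable interface:

    #include <binder/Parcel.h>
    #include <binder/Parcelable.h>

    // Write an object into a Parcel, rewind, and read it back into another instance.
    static void roundTrip(const android::Parcelable& out, android::Parcelable& in) {
        android::Parcel parcel;
        out.writeToParcel(&parcel);   // serialize
        parcel.setDataPosition(0);    // rewind to the start before reading
        in.readFromParcel(&parcel);   // deserialize into the second object
    }
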
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 2fe9a94..7046075 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -26,85 +26,122 @@
 using namespace android;
 using namespace android::hardware::camera2::params;
 
+constexpr int8_t kMaxLoopIterations = 100;
 constexpr int32_t kSizeMin = 0;
 constexpr int32_t kSizeMax = 1000;
 
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+class C2OutputConfigurationFuzzer {
+  public:
+    void process(const uint8_t* data, size_t size);
 
-    OutputConfiguration* outputConfiguration = nullptr;
+  private:
+    void invokeC2OutputConfigFuzzer();
+    unique_ptr<OutputConfiguration> getC2OutputConfig();
+    sp<IGraphicBufferProducer> createIGraphicBufferProducer();
+    FuzzedDataProvider* mFDP = nullptr;
+};
 
-    if (fdp.ConsumeBool()) {
-        outputConfiguration = new OutputConfiguration();
+sp<IGraphicBufferProducer> C2OutputConfigurationFuzzer::createIGraphicBufferProducer() {
+    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+    sp<SurfaceControl> surfaceControl = composerClient->createSurface(
+            static_cast<String8>(mFDP->ConsumeRandomLengthString(kMaxBytes).c_str()) /* name */,
+            mFDP->ConsumeIntegral<uint32_t>() /* width */,
+            mFDP->ConsumeIntegral<uint32_t>() /* height */,
+            mFDP->ConsumeIntegral<int32_t>() /* format */,
+            mFDP->ConsumeIntegral<int32_t>() /* flags */);
+    if (surfaceControl) {
+        sp<Surface> surface = surfaceControl->getSurface();
+        return surface->getIGraphicBufferProducer();
     } else {
-        int32_t rotation = fdp.ConsumeIntegral<int32_t>();
-        string physicalCameraId = fdp.ConsumeRandomLengthString();
-        int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
-        bool isShared = fdp.ConsumeBool();
-
-        if (fdp.ConsumeBool()) {
-            sp<IGraphicBufferProducer> iGBP = nullptr;
-            sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-            sp<SurfaceControl> surfaceControl = composerClient->createSurface(
-                    static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
-                    fdp.ConsumeIntegral<uint32_t>() /* width */,
-                    fdp.ConsumeIntegral<uint32_t>() /* height */,
-                    fdp.ConsumeIntegral<int32_t>() /* format */,
-                    fdp.ConsumeIntegral<int32_t>() /* flags */);
-            if (surfaceControl) {
-                sp<Surface> surface = surfaceControl->getSurface();
-                iGBP = surface->getIGraphicBufferProducer();
-            }
-            outputConfiguration = new OutputConfiguration(iGBP, rotation, physicalCameraId,
-                                                          surfaceSetID, isShared);
-            iGBP.clear();
-            composerClient.clear();
-            surfaceControl.clear();
-        } else {
-            size_t iGBPSize = fdp.ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
-            vector<sp<IGraphicBufferProducer>> iGBPs;
-            for (size_t idx = 0; idx < iGBPSize; ++idx) {
-                sp<IGraphicBufferProducer> iGBP = nullptr;
-                sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-                sp<SurfaceControl> surfaceControl = composerClient->createSurface(
-                        static_cast<String8>(fdp.ConsumeRandomLengthString().c_str()) /* name */,
-                        fdp.ConsumeIntegral<uint32_t>() /* width */,
-                        fdp.ConsumeIntegral<uint32_t>() /* height */,
-                        fdp.ConsumeIntegral<int32_t>() /* format */,
-                        fdp.ConsumeIntegral<int32_t>() /* flags */);
-                if (surfaceControl) {
-                    sp<Surface> surface = surfaceControl->getSurface();
-                    iGBP = surface->getIGraphicBufferProducer();
-                    iGBPs.push_back(iGBP);
-                }
-                iGBP.clear();
-                composerClient.clear();
-                surfaceControl.clear();
-            }
-            outputConfiguration = new OutputConfiguration(iGBPs, rotation, physicalCameraId,
-                                                          surfaceSetID, isShared);
-        }
+        sp<IGraphicBufferProducer> gbp;
+        return gbp;
     }
+}
 
-    outputConfiguration->getRotation();
-    outputConfiguration->getSurfaceSetID();
-    outputConfiguration->getSurfaceType();
-    outputConfiguration->getWidth();
-    outputConfiguration->getHeight();
-    outputConfiguration->isDeferred();
-    outputConfiguration->isShared();
-    outputConfiguration->getPhysicalCameraId();
+unique_ptr<OutputConfiguration> C2OutputConfigurationFuzzer::getC2OutputConfig() {
+    unique_ptr<OutputConfiguration> outputConfiguration = nullptr;
+    auto selectOutputConfigurationConstructor =
+            mFDP->PickValueInArray<const std::function<void()>>({
+                    [&]() { outputConfiguration = make_unique<OutputConfiguration>(); },
 
-    OutputConfiguration outputConfiguration2;
-    outputConfiguration->gbpsEqual(outputConfiguration2);
-    outputConfiguration->sensorPixelModesUsedEqual(outputConfiguration2);
-    outputConfiguration->gbpsLessThan(outputConfiguration2);
-    outputConfiguration->sensorPixelModesUsedLessThan(outputConfiguration2);
-    outputConfiguration->getGraphicBufferProducers();
-    sp<IGraphicBufferProducer> gbp;
-    outputConfiguration->addGraphicProducer(gbp);
-    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration);
-    invokeReadWriteParcel<OutputConfiguration>(outputConfiguration);
-    delete outputConfiguration;
+                    [&]() {
+                        int32_t rotation = mFDP->ConsumeIntegral<int32_t>();
+                        string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
+                        bool isShared = mFDP->ConsumeBool();
+                        sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
+                        outputConfiguration = make_unique<OutputConfiguration>(
+                                iGBP, rotation, physicalCameraId, surfaceSetID, isShared);
+                    },
+
+                    [&]() {
+                        int32_t rotation = mFDP->ConsumeIntegral<int32_t>();
+                        string physicalCameraId = mFDP->ConsumeRandomLengthString(kMaxBytes);
+                        int32_t surfaceSetID = mFDP->ConsumeIntegral<int32_t>();
+                        bool isShared = mFDP->ConsumeBool();
+                        size_t iGBPSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
+                        vector<sp<IGraphicBufferProducer>> iGBPs;
+                        for (size_t idx = 0; idx < iGBPSize; ++idx) {
+                            sp<IGraphicBufferProducer> iGBP = createIGraphicBufferProducer();
+                            iGBPs.push_back(iGBP);
+                        }
+                        outputConfiguration = make_unique<OutputConfiguration>(
+                                iGBPs, rotation, physicalCameraId, surfaceSetID, isShared);
+                    },
+            });
+    selectOutputConfigurationConstructor();
+    return outputConfiguration;
+}
+
+void C2OutputConfigurationFuzzer::invokeC2OutputConfigFuzzer() {
+    unique_ptr<OutputConfiguration> outputConfiguration = getC2OutputConfig();
+    int8_t count = kMaxLoopIterations;
+    while (--count > 0) {
+        unique_ptr<OutputConfiguration> outputConfiguration2 = getC2OutputConfig();
+        auto callC2OutputConfAPIs = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { outputConfiguration->getRotation(); },
+                [&]() { outputConfiguration->getSurfaceSetID(); },
+                [&]() { outputConfiguration->getSurfaceType(); },
+                [&]() { outputConfiguration->getWidth(); },
+                [&]() { outputConfiguration->getHeight(); },
+                [&]() { outputConfiguration->isDeferred(); },
+                [&]() { outputConfiguration->isShared(); },
+                [&]() { outputConfiguration->getPhysicalCameraId(); },
+                [&]() { outputConfiguration->gbpsEqual(*outputConfiguration2); },
+                [&]() { outputConfiguration->sensorPixelModesUsedEqual(*outputConfiguration2); },
+                [&]() { outputConfiguration->gbpsLessThan(*outputConfiguration2); },
+                [&]() { outputConfiguration->sensorPixelModesUsedLessThan(*outputConfiguration2); },
+                [&]() { outputConfiguration->getGraphicBufferProducers(); },
+                [&]() {
+                    sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
+                    outputConfiguration->addGraphicProducer(gbp);
+                },
+                [&]() { outputConfiguration->isMultiResolution(); },
+                [&]() { outputConfiguration->getColorSpace(); },
+                [&]() { outputConfiguration->getStreamUseCase(); },
+                [&]() { outputConfiguration->getTimestampBase(); },
+                [&]() { outputConfiguration->getMirrorMode(); },
+                [&]() { outputConfiguration->useReadoutTimestamp(); },
+        });
+        callC2OutputConfAPIs();
+    }
+    // Keep the invokeReadWrite() APIs out of the while loop to avoid possible OOM.
+    invokeReadWriteNullParcel<OutputConfiguration>(outputConfiguration.get());
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcel<OutputConfiguration>(outputConfiguration.get());
+    } else {
+        invokeNewReadWriteParcel<OutputConfiguration>(outputConfiguration.get(), *mFDP);
+    }
+}
+
+void C2OutputConfigurationFuzzer::process(const uint8_t* data, size_t size) {
+    mFDP = new FuzzedDataProvider(data, size);
+    invokeC2OutputConfigFuzzer();
+    delete mFDP;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    C2OutputConfigurationFuzzer c2OutputConfigurationFuzzer;
+    c2OutputConfigurationFuzzer.process(data, size);
     return 0;
 }
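
As in the CameraParameters fuzzer, ownership here moves from raw new/delete (plus a hand-written destructor) to std::unique_ptr, so the OutputConfiguration objects are released automatically on every exit path of a fuzz iteration. The shape of that change in isolation, with a hypothetical Widget standing in for OutputConfiguration:

    #include <memory>

    struct Widget { int value = 0; };   // hypothetical stand-in for OutputConfiguration

    // Before: Widget* w = new Widget(); ... delete w;  (leaks if an early return is added)
    // After: the unique_ptr frees the Widget when it goes out of scope.
    std::unique_ptr<Widget> makeWidget(bool useDefault) {
        if (useDefault) {
            return std::make_unique<Widget>();
        }
        auto widget = std::make_unique<Widget>();
        widget->value = 42;             // configured variant, mirroring the non-default constructors
        return widget;
    }
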
diff --git a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
index dc40b0f..c588f11 100644
--- a/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2SubmitInfo_fuzzer.cpp
@@ -27,6 +27,10 @@
     SubmitInfo submitInfo;
     submitInfo.mRequestId = fdp.ConsumeIntegral<int32_t>();
     submitInfo.mLastFrameNumber = fdp.ConsumeIntegral<int64_t>();
-    invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    if (fdp.ConsumeBool()) {
+        invokeReadWriteParcel<SubmitInfo>(&submitInfo);
+    } else {
+        invokeNewReadWriteParcel<SubmitInfo>(&submitInfo, fdp);
+    }
     return 0;
 }
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index c2a7549..0812096 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -16,6 +16,7 @@
 
 #include <Camera.h>
 #include <CameraParameters.h>
+#include <CameraUtils.h>
 #include <binder/MemoryDealer.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -115,7 +116,7 @@
                                hardware::ICameraService::USE_CALLING_PID,
                                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
-                               &cameraDevice);
+                               kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
     } else {
         cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
                                mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
@@ -123,7 +124,8 @@
                                mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
                                /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
                                /*overrideToPortrait*/ mFDP->ConsumeBool(),
-                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), &cameraDevice);
+                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+                               /*devicePolicy*/0, &cameraDevice);
     }
 
     mCamera = Camera::create(cameraDevice);
@@ -150,13 +152,14 @@
     }
 
     int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
-    Camera::getNumberOfCameras();
+    Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
     CameraInfo cameraInfo;
     cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
                                             : mFDP->ConsumeIntegral<int32_t>();
     cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
                                                  : mFDP->ConsumeIntegral<int32_t>();
-    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+                          /*devicePolicy*/0, &cameraInfo);
     mCamera->reconnect();
 
     sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
diff --git a/camera/tests/fuzzer/camera_utils_fuzzer.cpp b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
index 365305e..c816f82 100644
--- a/camera/tests/fuzzer/camera_utils_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_utils_fuzzer.cpp
@@ -112,7 +112,8 @@
             }
             string clientPackage = mFDP->ConsumeRandomLengthString(kMaxBytes);
 
-            cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+            cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage,
+                                            kDefaultDeviceId);
         }
 
         if (mFDP->ConsumeBool()) {
diff --git a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
index e14d9ce..3131f1d 100644
--- a/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_vendorTagDescriptor_fuzzer.cpp
@@ -29,6 +29,8 @@
 constexpr int32_t kRangeMin = 0;
 constexpr int32_t kRangeMax = 1000;
 constexpr int32_t kVendorTagDescriptorId = -1;
+constexpr int8_t kMinLoopIterations = 1;
+constexpr int8_t kMaxLoopIterations = 50;
 
 extern "C" {
 
@@ -95,39 +97,63 @@
     initVendorTagDescriptor();
 
     sp<VendorTagDescriptor> vdesc = new VendorTagDescriptor();
-    vdesc->copyFrom(*mVendorTagDescriptor);
-    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
-    VendorTagDescriptor::getGlobalVendorTagDescriptor();
 
-    int32_t tagCount = mVendorTagDescriptor->getTagCount();
-    if (tagCount > 0) {
-        uint32_t tagArray[tagCount];
-        mVendorTagDescriptor->getTagArray(tagArray);
-        uint32_t tag;
-        for (int32_t i = 0; i < tagCount; ++i) {
-            tag = tagArray[i];
-            get_local_camera_metadata_section_name_vendor_id(tag, kVendorTagDescriptorId);
-            get_local_camera_metadata_tag_name_vendor_id(tag, kVendorTagDescriptorId);
-            get_local_camera_metadata_tag_type_vendor_id(tag, kVendorTagDescriptorId);
-            mVendorTagDescriptor->getSectionIndex(tag);
-        }
-        mVendorTagDescriptor->getAllSectionNames();
+    int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
+    while (--count > 0) {
+        auto callVendorTagDescriptor = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    int32_t tagCount = mVendorTagDescriptor->getTagCount();
+                    if (tagCount > 0) {
+                        uint32_t tagArray[tagCount];
+                        mVendorTagDescriptor->getTagArray(tagArray);
+                        uint32_t tag;
+                        for (int32_t i = 0; i < tagCount; ++i) {
+                            tag = tagArray[i];
+                            get_local_camera_metadata_section_name_vendor_id(
+                                    tag, kVendorTagDescriptorId);
+                            get_local_camera_metadata_tag_name_vendor_id(tag,
+                                                                         kVendorTagDescriptorId);
+                            get_local_camera_metadata_tag_type_vendor_id(tag,
+                                                                         kVendorTagDescriptorId);
+                            mVendorTagDescriptor->getSectionIndex(tag);
+                        }
+                    }
+                },
+                [&]() {
+                    if (mVendorTagDescriptor->getTagCount() > 0) {
+                        mVendorTagDescriptor->getAllSectionNames();
+                    }
+                },
+                [&]() { vdesc->copyFrom(*mVendorTagDescriptor); },
+                [&]() {
+                    VendorTagDescriptor::setAsGlobalVendorTagDescriptor(mVendorTagDescriptor);
+                },
+                [&]() { VendorTagDescriptor::getGlobalVendorTagDescriptor(); },
+                [&]() {
+                    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
+                    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
+                    uint32_t lookupTag;
+                    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
+                },
+                [&]() {
+                    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+                    int32_t indentation =
+                            mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
+                    mVendorTagDescriptor->dump(fd, verbosity, indentation);
+                    close(fd);
+                },
+        });
+        callVendorTagDescriptor();
     }
 
-    String8 name((mFDP->ConsumeRandomLengthString()).c_str());
-    String8 section((mFDP->ConsumeRandomLengthString()).c_str());
-    uint32_t lookupTag;
-    mVendorTagDescriptor->lookupTag(name, section, &lookupTag);
-
-    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-    int32_t verbosity = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
-    int32_t indentation = mFDP->ConsumeIntegralInRange<int32_t>(kRangeMin, kRangeMax);
-    mVendorTagDescriptor->dump(fd, verbosity, indentation);
-
-    invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    // Keep the invokeReadWrite() APIs out of the while loop to avoid possible OOM.
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor);
+    } else {
+        invokeNewReadWriteParcelsp<VendorTagDescriptor>(mVendorTagDescriptor, *mFDP);
+    }
     VendorTagDescriptor::clearGlobalVendorTagDescriptor();
-    vdesc.clear();
-    close(fd);
 }
 
 void VendorTagDescriptorFuzzer::invokeVendorTagDescriptorCache() {
@@ -135,36 +161,52 @@
     uint64_t id = mFDP->ConsumeIntegral<uint64_t>();
     initVendorTagDescriptor();
 
-    mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor);
-    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
-    VendorTagDescriptorCache::getGlobalVendorTagCache();
-    sp<VendorTagDescriptor> tagDesc;
-    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
-
-    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
-    if (tagCount > 0) {
-        uint32_t tagArray[tagCount];
-        mVendorTagDescriptorCache->getTagArray(tagArray, id);
-        uint32_t tag;
-        for (int32_t i = 0; i < tagCount; ++i) {
-            tag = tagArray[i];
-            get_local_camera_metadata_section_name_vendor_id(tag, id);
-            get_local_camera_metadata_tag_name_vendor_id(tag, id);
-            get_local_camera_metadata_tag_type_vendor_id(tag, id);
-        }
+    int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
+    while (--count > 0) {
+        auto callVendorTagDescriptorCache = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { mVendorTagDescriptorCache->addVendorDescriptor(id, mVendorTagDescriptor); },
+                [&]() {
+                    VendorTagDescriptorCache::setAsGlobalVendorTagCache(mVendorTagDescriptorCache);
+                },
+                [&]() { VendorTagDescriptorCache::getGlobalVendorTagCache(); },
+                [&]() {
+                    sp<VendorTagDescriptor> tagDesc;
+                    mVendorTagDescriptorCache->getVendorTagDescriptor(id, &tagDesc);
+                },
+                [&]() {
+                    int32_t tagCount = mVendorTagDescriptorCache->getTagCount(id);
+                    if (tagCount > 0) {
+                        uint32_t tagArray[tagCount];
+                        mVendorTagDescriptorCache->getTagArray(tagArray, id);
+                        uint32_t tag;
+                        for (int32_t i = 0; i < tagCount; ++i) {
+                            tag = tagArray[i];
+                            get_local_camera_metadata_section_name_vendor_id(tag, id);
+                            get_local_camera_metadata_tag_name_vendor_id(tag, id);
+                            get_local_camera_metadata_tag_type_vendor_id(tag, id);
+                        }
+                    }
+                },
+                [&]() {
+                    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
+                    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+                    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
+                    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
+                    close(fd);
+                },
+                [&]() { VendorTagDescriptorCache::isVendorCachePresent(id); },
+                [&]() { mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors(); },
+        });
+        callVendorTagDescriptorCache();
     }
 
-    int32_t fd = open("/dev/null", O_CLOEXEC | O_RDWR | O_CREAT);
-    int32_t verbosity = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
-    int32_t indentation = mFDP->ConsumeIntegralInRange<int>(kRangeMin, kRangeMax);
-    mVendorTagDescriptorCache->dump(fd, verbosity, indentation);
-
-    invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
-    VendorTagDescriptorCache::isVendorCachePresent(id);
-    mVendorTagDescriptorCache->getVendorIdsAndTagDescriptors();
+    // Keep the invokeReadWrite() APIs out of the while loop to avoid possible OOM.
+    if (mFDP->ConsumeBool()) {
+        invokeReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache);
+    } else {
+        invokeNewReadWriteParcelsp<VendorTagDescriptorCache>(mVendorTagDescriptorCache, *mFDP);
+    }
     mVendorTagDescriptorCache->clearGlobalVendorTagCache();
-    tagDesc.clear();
-    close(fd);
 }
 
 void VendorTagDescriptorFuzzer::invokeVendorTagErrorConditions() {
@@ -177,26 +219,39 @@
         VendorTagDescriptor::createDescriptorFromOps(/*vOps*/ NULL, vDesc);
     } else {
         VendorTagDescriptor::createDescriptorFromOps(&vOps, vDesc);
-        int32_t tagCount = vDesc->getTagCount();
-        uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
-        uint32_t badTagArray[tagCount + 1];
-        vDesc->getTagArray(badTagArray);
-        vDesc->getSectionName(badTag);
-        vDesc->getTagName(badTag);
-        vDesc->getTagType(badTag);
-        VendorTagDescriptor::clearGlobalVendorTagDescriptor();
-        VendorTagDescriptor::getGlobalVendorTagDescriptor();
-        VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc);
+
+        int8_t count = mFDP->ConsumeIntegralInRange<int8_t>(kMinLoopIterations, kMaxLoopIterations);
+        while (--count > 0) {
+            int32_t tagCount = vDesc->getTagCount();
+            uint32_t badTag = mFDP->ConsumeIntegral<uint32_t>();
+            uint32_t badTagArray[tagCount + 1];
+            auto callVendorTagErrorConditions =
+                    mFDP->PickValueInArray<const std::function<void()>>({
+                            [&]() { vDesc->getTagArray(badTagArray); },
+                            [&]() { vDesc->getSectionName(badTag); },
+                            [&]() { vDesc->getTagName(badTag); },
+                            [&]() { vDesc->getTagType(badTag); },
+                            [&]() { VendorTagDescriptor::clearGlobalVendorTagDescriptor(); },
+                            [&]() { VendorTagDescriptor::getGlobalVendorTagDescriptor(); },
+                            [&]() { VendorTagDescriptor::setAsGlobalVendorTagDescriptor(vDesc); },
+                    });
+            callVendorTagErrorConditions();
+        }
         invokeReadWriteNullParcelsp<VendorTagDescriptor>(vDesc);
-        vDesc.clear();
     }
+    vDesc.clear();
 }
 
 void VendorTagDescriptorFuzzer::process(const uint8_t* data, size_t size) {
     mFDP = new FuzzedDataProvider(data, size);
-    invokeVendorTagDescriptor();
-    invokeVendorTagDescriptorCache();
-    invokeVendorTagErrorConditions();
+    while (mFDP->remaining_bytes()) {
+        auto invokeVendorTagDescriptorFuzzer = mFDP->PickValueInArray<const std::function<void()>>({
+                [&]() { invokeVendorTagDescriptor(); },
+                [&]() { invokeVendorTagDescriptorCache(); },
+                [&]() { invokeVendorTagErrorConditions(); },
+        });
+        invokeVendorTagDescriptorFuzzer();
+    }
     delete mFDP;
 }
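
Unlike the parameters fuzzer, which loops until remaining_bytes() hits zero, this fuzzer draws a bounded iteration count from the input so a single case cannot spend its whole byte budget inside one sub-fuzzer. A compact sketch of that bounding (doOneCall() is a hypothetical stand-in for the PickValueInArray dispatch above):

    #include <fuzzer/FuzzedDataProvider.h>

    static void doOneCall(FuzzedDataProvider& fdp) {   // hypothetical per-iteration action
        (void)fdp.ConsumeIntegral<uint8_t>();
    }

    static void runBounded(FuzzedDataProvider& fdp) {
        constexpr int8_t kMin = 1, kMax = 50;
        int8_t count = fdp.ConsumeIntegralInRange<int8_t>(kMin, kMax);
        while (--count > 0) {     // note: a draw of 1 yields zero iterations
            doOneCall(fdp);
        }
    }
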
 
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index a6b20cf..28670b1 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -117,6 +117,7 @@
 static bool gSizeSpecified = false;     // was size explicitly requested?
 static bool gWantInfoScreen = false;    // do we want initial info screen?
 static bool gWantFrameTime = false;     // do we want times on each frame?
+static bool gSecureDisplay = false;     // should we create a secure virtual display?
 static uint32_t gVideoWidth = 0;        // default width+height
 static uint32_t gVideoHeight = 0;
 static uint32_t gBitRate = 20000000;     // 20Mbps
@@ -362,7 +363,7 @@
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
     sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
-            String8("ScreenRecorder"), false /*secure*/);
+            String8("ScreenRecorder"), gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
@@ -1253,6 +1254,7 @@
         { "persistent-surface", no_argument,        NULL, 'p' },
         { "bframes",            required_argument,  NULL, 'B' },
         { "display-id",         required_argument,  NULL, 'd' },
+        { "capture-secure",     no_argument,        NULL, 'S' },
         { NULL,                 0,                  NULL, 0 }
     };
 
@@ -1372,6 +1374,9 @@
 
             fprintf(stderr, "Invalid physical display ID\n");
             return 2;
+        case 'S':
+            gSecureDisplay = true;
+            break;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 7106d66..650a589 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -466,6 +466,12 @@
     mMetrics->SetAppPackageName(appPackageName);
     mMetrics->SetAppUid(AIBinder_getCallingUid());
     for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
+        CryptoSchemes schemes{};
+        auto err = mFactories[i]->getSupportedCryptoSchemes(&schemes);
+        if (!err.isOk() || !std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+            continue;
+        }
+
         ::ndk::ScopedAStatus status =
                 mFactories[i]->createDrmPlugin(uuidAidl, appPackageNameAidl, &pluginAidl);
         if (status.isOk()) {
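
The guard added above keeps DrmHalAidl from asking every factory to instantiate a plugin: a factory is only consulted if its advertised CryptoSchemes list contains the requested UUID. The membership test is plain std::count; the equivalent check in isolation, sketched with a stand-in type because the AIDL Uuid definition is elided here:

    #include <algorithm>
    #include <array>
    #include <cstdint>
    #include <vector>

    using FakeUuid = std::array<uint8_t, 16>;   // stand-in for the AIDL Uuid type

    // Only factories whose advertised scheme list contains the requested UUID are used.
    static bool supportsScheme(const std::vector<FakeUuid>& advertised, const FakeUuid& wanted) {
        return std::count(advertised.begin(), advertised.end(), wanted) > 0;
    }
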
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index b1d4ad4..ec45e2f 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -6,20 +6,30 @@
 }
 
 aconfig_declarations {
+    name: "com.android.media.audioclient-aconfig",
+    package: "com.android.media.audioclient",
+    container: "system",
+    srcs: ["audioclient.aconfig"],
+}
+
+aconfig_declarations {
     name: "com.android.media.audioserver-aconfig",
     package: "com.android.media.audioserver",
+    container: "system",
     srcs: ["audioserver.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.audio-aconfig",
     package: "com.android.media.audio",
+    container: "system",
     srcs: ["audio.aconfig"],
 }
 
 aconfig_declarations {
     name: "com.android.media.aaudio-aconfig",
     package: "com.android.media.aaudio",
+    container: "system",
     srcs: ["aaudio.aconfig"],
 }
 
@@ -43,6 +53,18 @@
     name: "com.android.media.audio-aconfig-cc",
     aconfig_declarations: "com.android.media.audio-aconfig",
     defaults: ["audio-aconfig-cc-defaults"],
+    double_loadable: true,
+    host_supported: true,
+    product_available: true,
+    vendor_available: true,
+    // TODO(b/316909431) native_bridge_supported: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.btservices",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
 }
 
 cc_aconfig_library {
@@ -51,11 +73,23 @@
     defaults: ["audio-aconfig-cc-defaults"],
 }
 
+cc_aconfig_library {
+    name: "com.android.media.audioclient-aconfig-cc",
+    aconfig_declarations: "com.android.media.audioclient-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
 java_aconfig_library {
     name: "com.android.media.audio-aconfig-java",
     aconfig_declarations: "com.android.media.audio-aconfig",
 }
 
+// For CTS usage
+java_aconfig_library {
+    name: "com.android.media.audioserver-aconfig-java",
+    aconfig_declarations: "com.android.media.audioserver-aconfig",
+}
+
 // Framework available flags to follow
 // Care must be taken to avoid namespace conflicts.
 // These flags are accessible outside of the platform! Limit usage to @FlaggedApi wherever possible
@@ -63,22 +97,25 @@
 aconfig_declarations {
     name: "android.media.audio-aconfig",
     package: "android.media.audio",
+    container: "system",
     srcs: ["audio_framework.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 aconfig_declarations {
     name: "android.media.audiopolicy-aconfig",
     package: "android.media.audiopolicy",
+    container: "system",
     srcs: ["audiopolicy_framework.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 aconfig_declarations {
     name: "android.media.midi-aconfig",
     package: "android.media.midi",
+    container: "system",
     srcs: ["midi_flags.aconfig"],
-    visibility: ["//visibility:private"],
+    visibility: ["//frameworks/base/api"],
 }
 
 java_aconfig_library {
@@ -91,6 +128,11 @@
     name: "android.media.audiopolicy-aconfig-java",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
+    min_sdk_version: "VanillaIceCream",
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.car.framework",
+    ],
 }
 
 java_aconfig_library {
@@ -99,6 +141,12 @@
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
+cc_aconfig_library {
+    name: "android.media.audiopolicy-aconfig-cc",
+    aconfig_declarations: "android.media.audiopolicy-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+}
+
 aconfig_declarations_group {
     name: "audio-framework-aconfig",
     java_aconfig_libraries: [
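
The cc_aconfig_library and java_aconfig_library targets added here are what turn these flag declarations into code-level accessors. A sketch of how native code would read one of the com.android.media.audio flags, assuming the usual aconfig codegen layout (header and namespace derived from the package name, one bool accessor per flag; the exact names come from the generated library, not from this change):

    // Assumed generated header for package "com.android.media.audio".
    #include <com_android_media_audio.h>

    void configureSpatializer() {
        if (com::android::media::audio::spatializer_offload()) {
            // flag-guarded path: offload-capable spatializer setup would go here
        }
    }
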
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index 7196525..c160109 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.aaudio"
+container: "system"
 
 flag {
     name: "sample_rate_conversion"
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 73cb8ca..17ce8df 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audio"
+container: "system"
 
 flag {
     name: "alarm_min_volume_zero"
@@ -12,6 +13,13 @@
 }
 
 flag {
+    name: "as_device_connection_failure"
+    namespace: "media_audio"
+    description: "AudioService handles device connection failures."
+    bug: "326597760"
+}
+
+flag {
     name: "bluetooth_mac_address_anonymization"
     namespace: "media_audio"
     description:
@@ -37,8 +45,55 @@
 }
 
 flag {
+    name: "ringer_mode_affects_alarm"
+    namespace: "media_audio"
+    description:
+        "Support a configuration where ringer mode affects alarm stream"
+    bug: "312456558"
+}
+
+flag {
+    name: "set_stream_volume_order"
+    namespace: "media_audio"
+    description:
+        "Fix race condition by adjusting the order when"
+        "setStreamVolume is calling into the BT stack"
+    bug: "329202581"
+}
+
+flag {
     name: "spatializer_offload"
     namespace: "media_audio"
     description: "Enable spatializer offload"
     bug: "307842941"
 }
+
+flag {
+    name: "spatializer_upmix"
+    namespace: "media_audio"
+    description: "Enable spatializer upmix"
+    bug: "323985367"
+}
+
+flag {
+    name: "stereo_spatialization"
+    namespace: "media_audio"
+    description: "Enable stereo channel mask for spatialization."
+    bug: "303920722"
+}
+
+flag {
+    name: "vgs_vss_sync_mute_order"
+    namespace: "media_audio"
+    description:
+        "When syncing the VGS to VSS we need to first adjust the"
+        "mute state before the index."
+    bug: "331849188"
+}
+
+flag {
+    name: "volume_refactoring"
+    namespace: "media_audio"
+    description: "Refactor the audio volume internal architecture logic"
+    bug: "324152869"
+}
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index b7ec093..0209e28 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -22,9 +22,40 @@
     bug: "302323921"
 }
 
+flag {
+    name: "feature_spatial_audio_headtracking_low_latency"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Define feature for low latency headtracking for SA"
+    bug: "324291076"
+}
+
+flag {
+    name: "focus_exclusive_with_recording"
+    is_exported: true
+    namespace: "media_audio"
+    description:
+        "Audio focus GAIN_TRANSIENT_EXCLUSIVE only mutes"
+        "notifications when the focus owner is also recording"
+    bug: "316414750"
+}
+
+# TODO remove
+flag {
+    name: "foreground_audio_control"
+    is_exported: true
+    namespace: "media_audio"
+    description:
+        "Audio focus gain requires FGS or delegation to "
+        "take effect"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
 # TODO remove
 flag {
     name: "focus_freeze_test_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
  AudioManager audio focus test APIs:\
@@ -38,15 +69,25 @@
 
 flag {
     name: "loudness_configurator_api"
+    is_exported: true
     namespace: "media_audio"
     description: "\
 Enable the API for providing loudness metadata and CTA-2075 \
 support."
     bug: "298463873"
+    is_exported: true
+}
+
+flag {
+    name: "mute_background_audio"
+    namespace: "media_audio"
+    description: "mute audio playing in background"
+    bug: "296232417"
 }
 
 flag {
     name: "sco_managed_by_audio"
+    is_exported: true
     namespace: "media_audio"
     description: "\
 Enable new implementation of headset profile device connection and\
@@ -54,3 +95,39 @@
     bug: "265057196"
 }
 
+flag {
+    name: "supported_device_types_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Surface new API method AudioManager.getSupportedDeviceTypes()"
+    bug: "307537538"
+}
+
+flag {
+    name: "ro_foreground_audio_control"
+    is_exported: true
+    namespace: "media_audio"
+    description:
+        "Audio focus gain requires FGS or delegation to "
+        "take effect"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "ro_volume_ringer_api_hardening"
+    namespace: "media_audio"
+    description: "Limit access to volume and ringer SDK APIs in AudioManager"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
+
+
+# TODO remove
+flag {
+    name: "volume_ringer_api_hardening"
+    namespace: "media_audio"
+    description: "Limit access to volume and ringer SDK APIs in AudioManager"
+    bug: "296232417"
+    is_fixed_read_only: true
+}
diff --git a/media/audio/aconfig/audioclient.aconfig b/media/audio/aconfig/audioclient.aconfig
new file mode 100644
index 0000000..a804834
--- /dev/null
+++ b/media/audio/aconfig/audioclient.aconfig
@@ -0,0 +1,16 @@
+# Flags for libaudioclient, and other native client libraries.
+#
+# Please add flags in alphabetical order.
+
+package: "com.android.media.audioclient"
+container: "system"
+
+flag {
+    name: "audiosystem_service_acquisition"
+    namespace: "media_audio"
+    description: "Clean up audiosystem service acquisition."
+    bug: "330358287"
+    metadata {
+      purpose: PURPOSE_BUGFIX
+    }
+}
diff --git a/media/audio/aconfig/audiopolicy_framework.aconfig b/media/audio/aconfig/audiopolicy_framework.aconfig
index 833730a..28b6c7f 100644
--- a/media/audio/aconfig/audiopolicy_framework.aconfig
+++ b/media/audio/aconfig/audiopolicy_framework.aconfig
@@ -4,10 +4,60 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.audiopolicy"
+container: "system"
+
+flag {
+    name: "audio_mix_ownership"
+    namespace: "media_audio"
+    description: "Improves ownership model of AudioMixes and the relationship between AudioPolicy and AudioMix."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_policy_ordering"
+    namespace: "media_audio"
+    description: "Orders AudioMixes per registered AudioPolicy."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
+
+flag {
+    name: "audio_mix_test_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Enable new Test APIs that provide access to registered AudioMixes on system server and native side."
+    bug: "309080867"
+    is_fixed_read_only: true
+}
 
 flag {
     name: "audio_policy_update_mixing_rules_api"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable AudioPolicy.updateMixingRules API for hot-swapping audio mixing rules."
     bug: "293874525"
 }
+
+flag {
+    name: "enable_fade_manager_configuration"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Enable Fade Manager Configuration support to determine fade properties"
+    bug: "307354764"
+}
+
+flag {
+    name: "multi_zone_audio"
+    namespace: "media_audio"
+    description: "Enable multi-zone audio support in audio product strategies."
+    bug: "316643994"
+}
+
+flag {
+    name: "record_audio_device_aware_permission"
+    namespace: "media_audio"
+    description: "Enable device-aware permission handling for RECORD_AUDIO permission"
+    bug: "291737188"
+    is_fixed_read_only: true
+}
\ No newline at end of file
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 21ea1a2..5c6504f 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -3,6 +3,7 @@
 # Please add flags in alphabetical order.
 
 package: "com.android.media.audioserver"
+container: "system"
 
 flag {
     name: "direct_track_reprioritization"
diff --git a/media/audio/aconfig/midi_flags.aconfig b/media/audio/aconfig/midi_flags.aconfig
index ff9238a..1620e1b 100644
--- a/media/audio/aconfig/midi_flags.aconfig
+++ b/media/audio/aconfig/midi_flags.aconfig
@@ -4,9 +4,11 @@
 # Please add flags in alphabetical order.
 
 package: "android.media.midi"
+container: "system"
 
 flag {
     name: "virtual_ump"
+    is_exported: true
     namespace: "media_audio"
     description: "Enable virtual UMP MIDI."
     bug: "291115176"
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index e3a9645..e74fb91 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 66ce5ea..2409894 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -1052,7 +1052,7 @@
             memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz);
             ++mNumInputFrames;
 
-            ALOGD("bytes generated %zu", encoded_packet->data.frame.sz);
+            ALOGV("bytes generated %zu", encoded_packet->data.frame.sz);
             uint32_t flags = 0;
             if (eos) {
                 flags |= C2FrameData::FLAG_END_OF_STREAM;
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
index 2054fe6..ccdde5e 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/Android.bp
@@ -15,6 +15,7 @@
 //
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
index 624aad2..2b1bca0 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/audio/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
index 0f07077..564de47 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
index cc019da..0640f02 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
index 40f5201..5e52fde 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/master/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
index ecc4f9d..d04c2f6 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_codec_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/Android.bp b/media/libaaudio/examples/Android.bp
index e2c1878..aa3ae5e 100644
--- a/media/libaaudio/examples/Android.bp
+++ b/media/libaaudio/examples/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/input_monitor/Android.bp b/media/libaaudio/examples/input_monitor/Android.bp
index fc55290..52a5914 100644
--- a/media/libaaudio/examples/input_monitor/Android.bp
+++ b/media/libaaudio/examples/input_monitor/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/loopback/Android.bp b/media/libaaudio/examples/loopback/Android.bp
index bde1024..6552113 100644
--- a/media/libaaudio/examples/loopback/Android.bp
+++ b/media/libaaudio/examples/loopback/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/examples/write_sine/Android.bp b/media/libaaudio/examples/write_sine/Android.bp
index 70b1764..fe78112 100644
--- a/media/libaaudio/examples/write_sine/Android.bp
+++ b/media/libaaudio/examples/write_sine/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index e148a53..fc8ad77 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index c3b32e6..9d9b574 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -2011,10 +2011,20 @@
         __INTRODUCED_IN(28);
 
 /**
- * Passes back the time at which a particular frame was presented.
+ * Returns the time at which a particular frame was played on a speaker or headset,
+ * or was recorded on a microphone.
+ *
  * This can be used to synchronize audio with video or MIDI.
  * It can also be used to align a recorded stream with a playback stream.
  *
+ * The framePosition is an index into the stream of audio data.
+ * The first frame played or recorded is at framePosition 0.
+ *
+ * These framePositions are the same units that you get from AAudioStream_getFramesRead()
+ * or AAudioStream_getFramesWritten().
+ * A "frame" is a set of audio sample values that are played simultaneously.
+ * For example, a stereo stream has two samples in a frame, left and right.
+ *
  * Timestamps are only valid when the stream is in {@link #AAUDIO_STREAM_STATE_STARTED}.
  * {@link #AAUDIO_ERROR_INVALID_STATE} will be returned if the stream is not started.
  * Note that because requestStart() is asynchronous, timestamps will not be valid until
@@ -2030,8 +2040,8 @@
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @param clockid CLOCK_MONOTONIC or CLOCK_BOOTTIME
- * @param framePosition pointer to a variable to receive the position
- * @param timeNanoseconds pointer to a variable to receive the time
+ * @param[out] framePosition pointer to a variable to receive the position
+ * @param[out] timeNanoseconds pointer to a variable to receive the time
  * @return {@link #AAUDIO_OK} or a negative error
  */
 AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* _Nonnull stream,
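For context, a minimal sketch (not taken from this patch) of how an application might combine AAudioStream_getTimestamp() with AAudioStream_getFramesWritten() to estimate output latency, assuming "stream" is an already-started output stream; the helper name estimateOutputLatencyNanos is hypothetical:

    #include <aaudio/AAudio.h>
    #include <time.h>

    // Sketch: estimate how far "now" is behind the most recently written frame.
    static int64_t estimateOutputLatencyNanos(AAudioStream *stream) {
        int64_t framePosition = 0;
        int64_t timeNanoseconds = 0;
        if (AAudioStream_getTimestamp(stream, CLOCK_MONOTONIC,
                                      &framePosition, &timeNanoseconds) != AAUDIO_OK) {
            return -1; // timestamps are only valid once the stream has started
        }
        const int64_t framesWritten = AAudioStream_getFramesWritten(stream);
        const int32_t sampleRate = AAudioStream_getSampleRate(stream);
        // Project when the most recently written frame should reach the speaker.
        const int64_t whenPlayedNanos = timeNanoseconds
                + ((framesWritten - framePosition) * 1000000000LL) / sampleRate;
        struct timespec now;
        clock_gettime(CLOCK_MONOTONIC, &now);
        const int64_t nowNanos = now.tv_sec * 1000000000LL + now.tv_nsec;
        return whenPlayedNanos - nowNanos;
    }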
diff --git a/media/libaaudio/include/aaudio/AAudioTesting.h b/media/libaaudio/include/aaudio/AAudioTesting.h
index 01d97b6..d67ec70 100644
--- a/media/libaaudio/include/aaudio/AAudioTesting.h
+++ b/media/libaaudio/include/aaudio/AAudioTesting.h
@@ -49,12 +49,6 @@
 };
 typedef int32_t aaudio_policy_t;
 
-// Internal error codes. Only used by the framework.
-enum {
-    AAUDIO_INTERNAL_ERROR_BASE = -1000,
-    AAUDIO_ERROR_STANDBY,
-};
-
 /**
  * Control whether AAudioStreamBuilder_openStream() will use the new MMAP data path
  * or the older "Legacy" data path.
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 8aaa4a0..ebb7637 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index e1d517e..0c55fca 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -72,6 +72,12 @@
      */
     int32_t pull(void *destination, int32_t targetFramesToRead);
 
+    // Reset the entire graph so that volume ramps start at their
+    // target value and sample rate converters start with no phase offset.
+    void reset() {
+        mSink->pullReset();
+    }
+
     /**
      * Set numFramesToWrite frames from the source into the flowgraph.
      * Then, attempt to read targetFramesToRead from the flowgraph.
diff --git a/media/libaaudio/src/client/AudioEndpoint.cpp b/media/libaaudio/src/client/AudioEndpoint.cpp
index e780f4f..cd7679c 100644
--- a/media/libaaudio/src/client/AudioEndpoint.cpp
+++ b/media/libaaudio/src/client/AudioEndpoint.cpp
@@ -278,3 +278,9 @@
         mDataQueue->eraseMemory();
     }
 }
+
+void AudioEndpoint::eraseEmptyDataMemory(int32_t numFrames) {
+    if (mDataQueue != nullptr) {
+        mDataQueue->eraseEmptyMemory(numFrames);
+    }
+}
diff --git a/media/libaaudio/src/client/AudioEndpoint.h b/media/libaaudio/src/client/AudioEndpoint.h
index b117572..7e97c6a 100644
--- a/media/libaaudio/src/client/AudioEndpoint.h
+++ b/media/libaaudio/src/client/AudioEndpoint.h
@@ -107,6 +107,8 @@
      */
     void eraseDataMemory();
 
+    void eraseEmptyDataMemory(int32_t numFrames);
+
     void freeDataQueue() { mDataQueue.reset(); }
 
     void dump() const;
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 52925d9..b2e93f0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -199,6 +199,7 @@
         if (getSampleRate() != getDeviceSampleRate()) {
             ALOGD("%s - skipping sample rate converter. SR = %d, Device SR = %d", __func__,
                     getSampleRate(), getDeviceSampleRate());
+            result = AAUDIO_ERROR_INVALID_RATE;
             goto error;
         }
     }
@@ -574,10 +575,20 @@
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
+    // For playback, sleep until all the audio data has played.
+    // Then clear the buffer to prevent noise.
+    prepareBuffersForStop();
+
     mClockModel.stop(AudioClock::getNanoseconds());
     setState(AAUDIO_STREAM_STATE_STOPPING);
     mAtomicInternalTimestamp.clear();
 
+#if 0
+    // Simulate very slow CPU, force race condition where the
+    // DSP keeps playing after we stop writing.
+    AudioClock::sleepForNanos(800 * AAUDIO_NANOS_PER_MILLISECOND);
+#endif
+
     result = mServiceInterface.stopStream(mServiceStreamHandleInfo);
     if (result == AAUDIO_ERROR_INVALID_HANDLE) {
         ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index a5981b1..20d55f9 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -123,6 +123,8 @@
 
     virtual void prepareBuffersForStart() {}
 
+    virtual void prepareBuffersForStop() {}
+
     virtual void advanceClientToMatchServerPosition(int32_t serverMargin) = 0;
 
     virtual void onFlushFromServer() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 5bac2ca..0427777 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -19,6 +19,8 @@
 
 #define ATRACE_TAG ATRACE_TAG_AUDIO
 
+#include <algorithm>
+
 #include <media/MediaMetricsItem.h>
 #include <utils/Trace.h>
 
@@ -100,10 +102,69 @@
 }
 
 void AudioStreamInternalPlay::prepareBuffersForStart() {
+    // Reset volume ramps to avoid a starting noise.
+    // This is called here instead of in AudioStreamInternal so that
+    // it will be easier to backport.
+    mFlowGraph.reset();
     // Prevent stale data from being played.
     mAudioEndpoint->eraseDataMemory();
 }
 
+void AudioStreamInternalPlay::prepareBuffersForStop() {
+    // If this is a shared stream and the FIFO is being read by the mixer then
+    // we don't have to worry about the DSP reading past the valid data. We can skip all this.
+    if (!mAudioEndpoint->isFreeRunning()) {
+        return;
+    }
+    // Sleep until the DSP has read all of the data written.
+    int64_t validFramesInBuffer = getFramesWritten() - getFramesRead();
+    if (validFramesInBuffer >= 0) {
+        int64_t emptyFramesInBuffer = ((int64_t) getBufferCapacity()) - validFramesInBuffer;
+
+        // Prevent stale data from being played if the DSP is still running.
+        // Erase some of the FIFO memory in front of the DSP read cursor.
+        // Subtract one burst so we do not accidentally erase data that the DSP might be using.
+        int64_t framesToErase = std::max((int64_t) 0,
+                                         emptyFramesInBuffer - getFramesPerBurst());
+        mAudioEndpoint->eraseEmptyDataMemory(framesToErase);
+
+        // Sleep until we are confident the DSP has consumed all of the valid data.
+        // Sleep for one extra burst as a safety margin because the IsochronousClockModel
+        // is not perfectly accurate.
+        int64_t positionInEmptyMemory = getFramesWritten() + getFramesPerBurst();
+        int64_t timeAllConsumed = mClockModel.convertPositionToTime(positionInEmptyMemory);
+        int64_t durationAllConsumed = timeAllConsumed - AudioClock::getNanoseconds();
+        // Prevent sleeping for too long.
+        durationAllConsumed = std::min(200 * AAUDIO_NANOS_PER_MILLISECOND, durationAllConsumed);
+        AudioClock::sleepForNanos(durationAllConsumed);
+    }
+
+    // Erase all of the memory in case the DSP keeps going and wraps around.
+    mAudioEndpoint->eraseDataMemory();
+
+    // Wait for the last buffer to reach the DAC.
+    // This is because the expected behavior of stop() is that all data written to the stream
+    // should be played before the hardware actually shuts down.
+    // This is different than pause(), where we just end as soon as possible.
+    // This can be important when, for example, playing car navigation and
+    // you want the user to hear the complete instruction.
+    if (mAtomicInternalTimestamp.isValid()) {
+        // Use timestamps to calculate the latency between the DSP reading
+        // a frame and when it reaches the DAC.
+        // This code assumes that timestamps are accurate.
+        Timestamp timestamp = mAtomicInternalTimestamp.read();
+        int64_t dacPosition = timestamp.getPosition();
+        int64_t hardwareReadTime = mClockModel.convertPositionToTime(dacPosition);
+        int64_t hardwareLatencyNanos = timestamp.getNanoseconds() - hardwareReadTime;
+        ALOGD("%s() hardwareLatencyNanos = %lld", __func__,
+              (long long) hardwareLatencyNanos);
+        // Prevent sleeping for too long.
+        hardwareLatencyNanos = std::min(30 * AAUDIO_NANOS_PER_MILLISECOND,
+                                        hardwareLatencyNanos);
+        AudioClock::sleepForNanos(hardwareLatencyNanos);
+    }
+}
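As a rough worked example of the sleep computed above (illustrative numbers only, not from this patch): for a 48 kHz stream with 480 valid frames still in the buffer and a 96-frame burst as the safety margin, the stream would sleep for roughly (480 + 96) / 48000 ≈ 12 ms, comfortably under the 200 ms cap.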
+
 void AudioStreamInternalPlay::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter() + serverMargin;
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter();
@@ -349,20 +410,26 @@
         // Call application using the AAudio callback interface.
         callbackResult = maybeCallDataCallback(mCallbackBuffer.get(), mCallbackFrames);
 
-        if (callbackResult == AAUDIO_CALLBACK_RESULT_CONTINUE) {
-            // Write audio data to stream. This is a BLOCKING WRITE!
-            result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
-            if ((result != mCallbackFrames)) {
-                if (result >= 0) {
-                    // Only wrote some of the frames requested. The stream can be disconnected
-                    // or timed out.
-                    processCommands();
-                    result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
-                }
-                maybeCallErrorCallback(result);
-                break;
+        // Write audio data to stream. This is a BLOCKING WRITE!
+        // Write data regardless of the callbackResult because we assume the data
+        // is valid even when the callback returns AAUDIO_CALLBACK_RESULT_STOP.
+        // Imagine a callback that is playing a large sound in memory.
+        // When it gets to the end of the sound it can partially fill
+        // the last buffer with the end of the sound, then zero pad the buffer, then return STOP.
+        // If the callback has no valid data then it should zero-fill the entire buffer.
+        result = write(mCallbackBuffer.get(), mCallbackFrames, timeoutNanos);
+        if (result != mCallbackFrames) {
+            if (result >= 0) {
+                // Only wrote some of the frames requested. The stream can be disconnected
+                // or timed out.
+                processCommands();
+                result = isDisconnected() ? AAUDIO_ERROR_DISCONNECTED : AAUDIO_ERROR_TIMEOUT;
             }
-        } else if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
+            maybeCallErrorCallback(result);
+            break;
+        }
+
+        if (callbackResult == AAUDIO_CALLBACK_RESULT_STOP) {
             ALOGD("%s(): callback returned AAUDIO_CALLBACK_RESULT_STOP", __func__);
             result = systemStopInternal();
             break;
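To make the comment above concrete, here is a minimal sketch (not part of this patch) of an application data callback that delivers the tail of a sound, zero-pads the rest of the final buffer, and then returns STOP; the SoundData struct and its fields are hypothetical:

    #include <aaudio/AAudio.h>
    #include <algorithm>
    #include <cstdint>
    #include <cstring>

    // Hypothetical helper describing the sound being played.
    struct SoundData {
        const uint8_t *samples;   // interleaved PCM
        int32_t totalFrames;
        int32_t cursor;           // frames already delivered
        int32_t bytesPerFrame;
    };

    static aaudio_data_callback_result_t dataCallback(
            AAudioStream * /*stream*/, void *userData, void *audioData, int32_t numFrames) {
        auto *sound = static_cast<SoundData *>(userData);
        const int32_t framesToCopy = std::min(sound->totalFrames - sound->cursor, numFrames);
        const size_t bytesPerFrame = static_cast<size_t>(sound->bytesPerFrame);
        memcpy(audioData, sound->samples + static_cast<size_t>(sound->cursor) * bytesPerFrame,
               static_cast<size_t>(framesToCopy) * bytesPerFrame);
        sound->cursor += framesToCopy;
        if (framesToCopy < numFrames) {
            // Zero-pad so the final, partially filled buffer still contains valid audio data.
            memset(static_cast<uint8_t *>(audioData) + static_cast<size_t>(framesToCopy) * bytesPerFrame,
                   0, static_cast<size_t>(numFrames - framesToCopy) * bytesPerFrame);
            return AAUDIO_CALLBACK_RESULT_STOP;  // the buffer is still written before stopping
        }
        return AAUDIO_CALLBACK_RESULT_CONTINUE;
    }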
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index b51b5d0..4e14f18 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -66,6 +66,8 @@
 
     void prepareBuffersForStart() override;
 
+    void prepareBuffersForStop() override;
+
     void advanceClientToMatchServerPosition(int32_t serverMargin) override;
 
     void onFlushFromServer() override;
diff --git a/media/libaaudio/src/core/AudioGlobal.h b/media/libaaudio/src/core/AudioGlobal.h
index 6c22744..8af49b4 100644
--- a/media/libaaudio/src/core/AudioGlobal.h
+++ b/media/libaaudio/src/core/AudioGlobal.h
@@ -22,6 +22,14 @@
 
 namespace aaudio {
 
+// Internal error codes. Only used by the framework.
+enum {
+    AAUDIO_INTERNAL_ERROR_BASE = -1000,
+    AAUDIO_ERROR_STANDBY,
+    AAUDIO_ERROR_ALREADY_CLOSED,
+
+};
+
 aaudio_policy_t AudioGlobal_getMMapPolicy();
 aaudio_result_t AudioGlobal_setMMapPolicy(aaudio_policy_t policy);
 
diff --git a/media/libaaudio/src/fifo/FifoBuffer.cpp b/media/libaaudio/src/fifo/FifoBuffer.cpp
index 5c11882..f3e3bbd 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.cpp
+++ b/media/libaaudio/src/fifo/FifoBuffer.cpp
@@ -150,7 +150,7 @@
 
     getEmptyRoomAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
+    // Write data in one or two parts.
     int partIndex = 0;
     while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
         fifo_frames_t framesToWrite = framesLeft;
@@ -192,3 +192,29 @@
         memset(getStorage(), 0, (size_t) numBytes);
     }
 }
+
+fifo_frames_t FifoBuffer::eraseEmptyMemory(fifo_frames_t numFrames) {
+    WrappingBuffer wrappingBuffer;
+    fifo_frames_t framesLeft = numFrames;
+
+    getEmptyRoomAvailable(&wrappingBuffer);
+
+    // Erase data in one or two parts.
+    int partIndex = 0;
+    while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
+        fifo_frames_t framesToWrite = framesLeft;
+        fifo_frames_t framesAvailable = wrappingBuffer.numFrames[partIndex];
+        if (framesAvailable > 0) {
+            if (framesToWrite > framesAvailable) {
+                framesToWrite = framesAvailable;
+            }
+            int32_t numBytes = convertFramesToBytes(framesToWrite);
+            memset(wrappingBuffer.data[partIndex], 0, numBytes);
+            framesLeft -= framesToWrite;
+        } else {
+            break;
+        }
+        partIndex++;
+    }
+    return numFrames - framesLeft; // number erased
+}
diff --git a/media/libaaudio/src/fifo/FifoBuffer.h b/media/libaaudio/src/fifo/FifoBuffer.h
index 7b0aca1..860ccad 100644
--- a/media/libaaudio/src/fifo/FifoBuffer.h
+++ b/media/libaaudio/src/fifo/FifoBuffer.h
@@ -115,6 +115,13 @@
      */
     void eraseMemory();
 
+    /**
+     * Clear some memory after the write pointer.
+     * This can be used to prevent the reader from accidentally reading stale data
+     * in case it is reading asynchronously.
+     */
+    fifo_frames_t eraseEmptyMemory(fifo_frames_t numFrames);
+
 protected:
 
     virtual uint8_t *getStorage() const = 0;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 59fdabc..d729047 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -69,16 +69,24 @@
     audio_channel_mask_t channelMask =
             AAudio_getChannelMaskForOpen(getChannelMask(), getSamplesPerFrame(), false /*isInput*/);
 
+    // Set flags based on selected parameters.
     audio_output_flags_t flags;
     aaudio_performance_mode_t perfMode = getPerformanceMode();
     switch(perfMode) {
-        case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY:
+        case AAUDIO_PERFORMANCE_MODE_LOW_LATENCY: {
             // Bypass the normal mixer and go straight to the FAST mixer.
-            // If the app asks for a sessionId then it means they want to use effects.
-            // So don't use RAW flag.
-            flags = (audio_output_flags_t) ((requestedSessionId == AAUDIO_SESSION_ID_NONE)
-                    ? (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_RAW)
-                    : (AUDIO_OUTPUT_FLAG_FAST));
+            // Some Usages need RAW mode so they can get the lowest possible latency.
+            // Other Usages should avoid RAW because it can interfere with
+            // dual sink routing or other features.
+            bool usageBenefitsFromRaw = getUsage() == AAUDIO_USAGE_GAME ||
+                    getUsage() == AAUDIO_USAGE_MEDIA;
+            // If an app does not ask for a sessionId then there will be no effects.
+            // So we can use the RAW flag.
+            flags = (audio_output_flags_t) (((requestedSessionId == AAUDIO_SESSION_ID_NONE)
+                                             && usageBenefitsFromRaw)
+                                            ? (AUDIO_OUTPUT_FLAG_FAST | AUDIO_OUTPUT_FLAG_RAW)
+                                            : (AUDIO_OUTPUT_FLAG_FAST));
+        }
             break;
 
         case AAUDIO_PERFORMANCE_MODE_POWER_SAVING:
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 23cc28c..6aa04a8 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -248,3 +249,30 @@
     srcs: ["test_idle_disconnected_shared_stream.cpp"],
     shared_libs: ["libaaudio"],
 }
+
+cc_test {
+    name: "test_multiple_close_simultaneously",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaaudio_tests_defaults",
+    ],
+    srcs: ["test_multiple_close_simultaneously.cpp"],
+    shared_libs: [
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaaudio",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    // This test runs for 1 minute to ensure that no crash happens.
+    // Therefore, set the timeout to 2 minutes to allow the test to complete.
+    test_options: {
+        test_runner_options: [
+            {
+                name: "native-test-timeout",
+                value: "2m",
+            },
+        ],
+    },
+}
diff --git a/media/libaaudio/tests/test_multiple_close_simultaneously.cpp b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
new file mode 100644
index 0000000..f6351b6
--- /dev/null
+++ b/media/libaaudio/tests/test_multiple_close_simultaneously.cpp
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "test_multiple_close_simultaneously"
+
+#include <chrono>
+#include <condition_variable>
+#include <shared_mutex>
+#include <string>
+#include <thread>
+
+#include <gtest/gtest.h>
+
+#include <binder/IBinder.h>
+#include <binder/IServiceManager.h>
+#include <utils/Log.h>
+
+#include <aaudio/AAudio.h>
+#include <aaudio/IAAudioService.h>
+#include <aaudio/StreamRequest.h>
+#include <aaudio/StreamParameters.h>
+
+using namespace android;
+using namespace aaudio;
+
+#define AAUDIO_SERVICE_NAME "media.aaudio"
+
+static constexpr int THREAD_NUM = 2;
+static constexpr auto TEST_DURATION = std::chrono::minutes(1);
+
+static std::string sError;
+static bool sTestPassed = true;
+
+struct Signal {
+    std::atomic_int value{0};
+    std::shared_mutex lock;
+    std::condition_variable_any cv;
+};
+
+class AAudioServiceDeathRecipient : public IBinder::DeathRecipient {
+public:
+    void binderDied(const wp<IBinder>& who __unused) override {
+        sError = "AAudioService is dead";
+        ALOGE("%s", sError.c_str());
+        sTestPassed = false;
+    }
+};
+
+sp<IAAudioService> getAAudioService(const sp<IBinder::DeathRecipient>& recipient) {
+    auto sm = defaultServiceManager();
+    if (sm == nullptr) {
+        sError = "Cannot get service manager";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
+    if (binder == nullptr) {
+        sError = "Cannot get aaudio service";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    if (binder->linkToDeath(recipient) != NO_ERROR) {
+        sError = "Cannot link to binder death";
+        ALOGE("%s", sError.c_str());
+        return nullptr;
+    }
+    return interface_cast<IAAudioService>(binder);
+}
+
+void openAndMultipleClose(const sp<IAAudioService>& aaudioService) {
+    auto start = std::chrono::system_clock::now();
+    bool hasFailedOpening = false;
+    while (sTestPassed && std::chrono::system_clock::now() - start < TEST_DURATION) {
+        StreamRequest inRequest;
+        StreamParameters outParams;
+        int32_t handle = 0;
+        inRequest.attributionSource.uid = getuid();
+        inRequest.attributionSource.pid = getpid();
+        inRequest.attributionSource.token = sp<BBinder>::make();
+        auto status = aaudioService->openStream(inRequest, &outParams, &handle);
+        if (!status.isOk()) {
+            sError = "Cannot open stream, it can be caused by service death";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        if (handle <= 0) {
+            sError = "Cannot get stream handle after open, returned handle"
+                    + std::to_string(handle);
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+        hasFailedOpening = false;
+
+        Signal isReady;
+        Signal startWork;
+        Signal isCompleted;
+        std::unique_lock readyLock(isReady.lock);
+        std::unique_lock completedLock(isCompleted.lock);
+        for (int i = 0; i < THREAD_NUM; ++i) {
+            std::thread closeStream([aaudioService, handle, &isReady, &startWork, &isCompleted] {
+                isReady.value++;
+                isReady.cv.notify_one();
+                {
+                    std::shared_lock<std::shared_mutex> _l(startWork.lock);
+                    startWork.cv.wait(_l, [&startWork] { return startWork.value.load() == 1; });
+                }
+                int32_t result;
+                aaudioService->closeStream(handle, &result);
+                isCompleted.value++;
+                isCompleted.cv.notify_one();
+            });
+            closeStream.detach();
+        }
+        isReady.cv.wait(readyLock, [&isReady] { return isReady.value == THREAD_NUM; });
+        {
+            std::unique_lock startWorkLock(startWork.lock);
+            startWork.value.store(1);
+        }
+        startWork.cv.notify_all();
+        isCompleted.cv.wait_for(completedLock,
+                                std::chrono::milliseconds(1000),
+                                [&isCompleted] { return isCompleted.value == THREAD_NUM; });
+        if (isCompleted.value != THREAD_NUM) {
+            sError = "Close is not completed within 1 second";
+            ALOGE("%s", sError.c_str());
+            sTestPassed = false;
+            break;
+        }
+    }
+}
+
+TEST(test_multiple_close_simultaneously, open_multiple_close) {
+    const auto recipient = sp<AAudioServiceDeathRecipient>::make();
+    auto aaudioService = getAAudioService(recipient);
+    ASSERT_NE(nullptr, aaudioService) << sError;
+    openAndMultipleClose(aaudioService);
+    ASSERT_TRUE(sTestPassed) << sError;
+}
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index ad717fa..04a8a45 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -63,6 +64,7 @@
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudioutils",
@@ -121,12 +123,14 @@
         "latest_android_media_audio_common_types_cpp_shared",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
+        "com.android.media.audioclient-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libaudio_aidl_conversion_common_cpp",
         "libaudioclient_aidl_conversion",
@@ -365,6 +369,7 @@
     ],
     imports: [
         "audioclient-types-aidl",
+        "framework-permission-aidl",
     ],
     backend: {
         cpp: {
diff --git a/media/libaudioclient/AudioProductStrategy.cpp b/media/libaudioclient/AudioProductStrategy.cpp
index d9fd58c..1417182 100644
--- a/media/libaudioclient/AudioProductStrategy.cpp
+++ b/media/libaudioclient/AudioProductStrategy.cpp
@@ -60,9 +60,13 @@
 }
 
 // Keep in sync with android/media/audiopolicy/AudioProductStrategy#attributeMatches
-int AudioProductStrategy::attributesMatchesScore(const audio_attributes_t refAttributes,
-                                                 const audio_attributes_t clientAttritubes)
+int AudioProductStrategy::attributesMatchesScore(audio_attributes_t refAttributes,
+                                                 audio_attributes_t clientAttritubes)
 {
+    refAttributes.flags = static_cast<audio_flags_mask_t>(
+            refAttributes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
+    clientAttritubes.flags = static_cast<audio_flags_mask_t>(
+            clientAttritubes.flags & AUDIO_FLAGS_AFFECT_STRATEGY_SELECTION);
     if (refAttributes == clientAttritubes) {
         return MATCH_EQUALS;
     }
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 91bc700..f729e1b 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -130,11 +130,7 @@
 }
 
 AudioRecord::AudioRecord(const AttributionSourceState &client)
-    : mActive(false), mStatus(NO_INIT), mClientAttributionSource(client),
-      mSessionId(AUDIO_SESSION_ALLOCATE), mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT), mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE), mSelectedMicDirection(MIC_DIRECTION_UNSPECIFIED),
-      mSelectedMicFieldDimension(MIC_FIELD_DIMENSION_DEFAULT)
+    : mClientAttributionSource(client)
 {
 }
 
@@ -154,13 +150,7 @@
         audio_port_handle_t selectedDeviceId,
         audio_microphone_direction_t selectedMicDirection,
         float microphoneFieldDimension)
-    : mActive(false),
-      mStatus(NO_INIT),
-      mClientAttributionSource(client),
-      mSessionId(AUDIO_SESSION_ALLOCATE),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mProxy(nullptr)
+    : mClientAttributionSource(client)
 {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(mClientAttributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mClientAttributionSource.pid));
@@ -199,9 +189,6 @@
 }
 
 void AudioRecord::stopAndJoinCallbacks() {
-    // Prevent nullptr crash if it did not open properly.
-    if (mStatus != NO_ERROR) return;
-
     // Make sure that callback function exits in the case where
     // it is looping on buffer empty condition in obtainBuffer().
     // Otherwise the callback thread will never exit.
@@ -693,16 +680,27 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
-            // stop capture so that audio policy manager does not reject the new instance start request
-            // as only one capture can be active at a time.
-            if (mAudioRecord != 0 && mActive) {
-                mAudioRecord->stop();
+            if (mActive) {
+                if (mSelectedDeviceId != mRoutedDeviceId) {
+                    // stop capture so that audio policy manager does not reject the new instance
+                    // start request as only one capture can be active at a time.
+                    if (mAudioRecord != 0) {
+                        mAudioRecord->stop();
+                    }
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                    mProxy->interrupt();
+                }
+            } else {
+                // if the track is idle, try to restore now and
+                // defer to next start if not possible
+                if (restoreRecord_l("setInputDevice") != OK) {
+                    android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
+                }
             }
-            android_atomic_or(CBLK_INVALID, &mCblk->mFlags);
-            mProxy->interrupt();
         }
     }
     return NO_ERROR;
@@ -1521,7 +1519,7 @@
             .set(AMEDIAMETRICS_PROP_WHERE, from)
             .record(); });
 
-    ALOGW("%s(%d): dead IAudioRecord, creating a new one from %s()", __func__, mPortId, from);
+    ALOGW("%s(%d) called from %s()", __func__, mPortId, from);
     ++mSequence;
 
     const int INITIAL_RETRIES = 3;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index a25d7ff..aa51652 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -22,6 +22,7 @@
 #include <android/media/IAudioPolicyService.h>
 #include <android/media/AudioMixUpdate.h>
 #include <android/media/BnCaptureStateListener.h>
+#include <android_media_audiopolicy.h>
 #include <binder/IServiceManager.h>
 #include <binder/ProcessState.h>
 #include <binder/IPCThreadState.h>
@@ -44,6 +45,8 @@
 
 // ----------------------------------------------------------------------------
 
+namespace audio_flags = android::media::audiopolicy;
+
 namespace android {
 using aidl_utils::statusTFromBinderStatus;
 using binder::Status;
@@ -1882,6 +1885,25 @@
     return statusTFromBinderStatus(aps->registerPolicyMixes(mixesAidl, registration));
 }
 
+status_t AudioSystem::getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) {
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    const sp<IAudioPolicyService> aps = AudioSystem::get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    std::vector<::android::media::AudioMix> aidlMixes;
+    Status status = aps->getRegisteredPolicyMixes(&aidlMixes);
+
+    for (const auto& aidlMix : aidlMixes) {
+        AudioMix mix = VALUE_OR_RETURN_STATUS(aidl2legacy_AudioMix(aidlMix));
+        mixes.push_back(mix);
+    }
+
+    return statusTFromBinderStatus(status);
+}
+
 status_t AudioSystem::updatePolicyMixes(
         const std::vector<std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>&
                 mixesWithUpdates) {
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 2afe80c..5d96d8e 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -233,25 +233,9 @@
     return NO_ERROR;
 }
 
-AudioTrack::AudioTrack() : AudioTrack(AttributionSourceState())
-{
-}
-
 AudioTrack::AudioTrack(const AttributionSourceState& attributionSource)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mRoutedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mClientAttributionSource(attributionSource),
-      mAudioTrackCallback(new AudioTrackCallback())
+    : mClientAttributionSource(attributionSource)
 {
-    mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
-    mAttributes.usage = AUDIO_USAGE_UNKNOWN;
-    mAttributes.flags = AUDIO_FLAG_NONE;
-    strcpy(mAttributes.tags, "");
 }
 
 AudioTrack::AudioTrack(
@@ -271,21 +255,12 @@
         bool doNotReconnect,
         float maxRequiredSpeed,
         audio_port_handle_t selectedDeviceId)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mAudioTrackCallback(new AudioTrackCallback())
 {
-    mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
-
-    // make_unique does not aggregate init until c++20
-    mSetParams = std::unique_ptr<SetParams>{
-            new SetParams{streamType, sampleRate, format, channelMask, frameCount, flags, callback,
-                          notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/,
-                          sessionId, transferType, offloadInfo, attributionSource, pAttributes,
-                          doNotReconnect, maxRequiredSpeed, selectedDeviceId}};
+    mSetParams = std::make_unique<SetParams>(
+        streamType, sampleRate, format, channelMask, frameCount, flags, callback,
+        notificationFrames, nullptr /*sharedBuffer*/, false /*threadCanCallJava*/,
+        sessionId, transferType, offloadInfo, attributionSource, pAttributes,
+        doNotReconnect, maxRequiredSpeed, selectedDeviceId);
 }
 
 namespace {
@@ -400,9 +375,6 @@
 }
 
 void AudioTrack::stopAndJoinCallbacks() {
-    // Prevent nullptr crash if it did not open properly.
-    if (mStatus != NO_ERROR) return;
-
     // Make sure that callback function exits in the case where
     // it is looping on buffer full condition in obtainBuffer().
     // Otherwise the callback thread will never exit.
@@ -919,6 +891,7 @@
     const int64_t beginNs = systemTime();
 
     AutoMutex lock(mLock);
+    if (mProxy == nullptr) return;  // not successfully initialized.
     mediametrics::Defer defer([&]() {
         mediametrics::LogItem(mMetricsId)
             .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
@@ -2873,7 +2846,9 @@
     if (!forceRestore &&
         (isOffloadedOrDirect_l() || mDoNotReconnect)) {
         // FIXME re-creation of offloaded and direct tracks is not yet implemented;
-        // reconsider enabling for linear PCM encodings when position can be preserved.
+        // Disabled since (1) timestamp correction is not implemented for non-PCM and
+        // (2) We pre-empt existing direct tracks on resource constraint, so these tracks
+        // shouldn't reconnect.
         result = DEAD_OBJECT;
         return result;
     }
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 48f8992..aa5c840 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -923,6 +923,7 @@
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
         const sp<AudioFlingerServerAdapter::Delegate>& delegate) : mDelegate(delegate) {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 status_t AudioFlingerServerAdapter::onTransact(uint32_t code,
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 60b08fa..441e329 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -242,6 +242,8 @@
     legacy.mCbFlags = VALUE_OR_RETURN(aidl2legacy_AudioMixCallbackFlag_uint32_t_mask(aidl.cbFlags));
     legacy.mAllowPrivilegedMediaPlaybackCapture = aidl.allowPrivilegedMediaPlaybackCapture;
     legacy.mVoiceCommunicationCaptureAllowed = aidl.voiceCommunicationCaptureAllowed;
+    legacy.mToken = aidl.mToken;
+    legacy.mVirtualDeviceId = aidl.mVirtualDeviceId;
     return legacy;
 }
 
@@ -265,6 +267,8 @@
     aidl.cbFlags = VALUE_OR_RETURN(legacy2aidl_uint32_t_AudioMixCallbackFlag_mask(legacy.mCbFlags));
     aidl.allowPrivilegedMediaPlaybackCapture = legacy.mAllowPrivilegedMediaPlaybackCapture;
     aidl.voiceCommunicationCaptureAllowed = legacy.mVoiceCommunicationCaptureAllowed;
+    aidl.mToken = legacy.mToken;
+    aidl.mVirtualDeviceId = legacy.mVirtualDeviceId;
     return aidl;
 }
 
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index 9c4ccb8..e213f08 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -872,6 +872,18 @@
                         { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
           .repeatCnt = 3,
           .repeatSegment = 0 },                              // TONE_NZ_CALL_WAITING
+        { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 },                             // TONE_MY_CONGESTION
+        { .segments = { { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+                        { .duration = 400, .waveFreq = { 425, 0 }, 0, 0 },
+                        { .duration = 2000, .waveFreq = { 0 }, 0, 0},
+                        { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+          .repeatCnt = ToneGenerator::TONEGEN_INF,
+          .repeatSegment = 0 }                              // TONE_MY_RINGTONE
 };
 
 // Used by ToneGenerator::getToneForRegion() to convert user specified supervisory tone type
@@ -976,6 +988,16 @@
             TONE_SUP_ERROR,               // TONE_SUP_ERROR
             TONE_NZ_CALL_WAITING,         // TONE_SUP_CALL_WAITING
             TONE_GB_RINGTONE              // TONE_SUP_RINGTONE
+        },
+        {   // MALAYSIA
+            TONE_SUP_DIAL,                // TONE_SUP_DIAL
+            TONE_SUP_BUSY,                // TONE_SUP_BUSY
+            TONE_MY_CONGESTION,           // TONE_SUP_CONGESTION
+            TONE_SUP_RADIO_ACK,           // TONE_SUP_RADIO_ACK
+            TONE_SUP_RADIO_NOTAVAIL,      // TONE_SUP_RADIO_NOTAVAIL
+            TONE_SUP_ERROR,               // TONE_SUP_ERROR
+            TONE_SUP_CALL_WAITING,        // TONE_SUP_CALL_WAITING
+            TONE_MY_RINGTONE              // TONE_SUP_RINGTONE
         }
 };
 
@@ -1055,6 +1077,8 @@
         mRegion = TAIWAN;
     } else if (strstr(value, "nz") != NULL) {
         mRegion = NZ;
+    } else if (strstr(value, "my") != NULL) {
+        mRegion = MY;
     } else {
         mRegion = CEPT;
     }
diff --git a/media/libaudioclient/aidl/android/media/AudioMix.aidl b/media/libaudioclient/aidl/android/media/AudioMix.aidl
index 88b0450..bb8537d 100644
--- a/media/libaudioclient/aidl/android/media/AudioMix.aidl
+++ b/media/libaudioclient/aidl/android/media/AudioMix.aidl
@@ -39,4 +39,8 @@
     boolean allowPrivilegedMediaPlaybackCapture;
     /** Indicates if the caller can capture voice communication output */
     boolean voiceCommunicationCaptureAllowed;
+    /** Identifies the owner of the AudioPolicy that this AudioMix belongs to */
+    IBinder mToken;
+    /** Indicates the Id of the VirtualDevice this AudioMix was registered for */
+    int mVirtualDeviceId;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 52c8da0..633493c 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -263,6 +263,8 @@
 
     void registerPolicyMixes(in AudioMix[] mixes, boolean registration);
 
+    List<AudioMix> getRegisteredPolicyMixes();
+
     void updatePolicyMixes(in AudioMixUpdate[] updates);
 
     void setUidDeviceAffinities(int /* uid_t */ uid, in AudioDevice[] devices);
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 02c865d..6cfccd6 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -1,4 +1,8 @@
 /*
+package {
+    default_team: "trendy_team_media_framework_audio",
+}
+
  * Copyright (C) 2022 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/media/libaudioclient/fuzzer/Android.bp b/media/libaudioclient/fuzzer/Android.bp
index 12eedca..a95c700 100644
--- a/media/libaudioclient/fuzzer/Android.bp
+++ b/media/libaudioclient/fuzzer/Android.bp
@@ -15,6 +15,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioclient/include/media/AudioPolicy.h b/media/libaudioclient/include/media/AudioPolicy.h
index ec35e93..b190fba 100644
--- a/media/libaudioclient/include/media/AudioPolicy.h
+++ b/media/libaudioclient/include/media/AudioPolicy.h
@@ -18,6 +18,7 @@
 #ifndef ANDROID_AUDIO_POLICY_H
 #define ANDROID_AUDIO_POLICY_H
 
+#include <binder/IBinder.h>
 #include <binder/Parcel.h>
 #include <media/AudioDeviceTypeAddr.h>
 #include <system/audio.h>
@@ -127,6 +128,8 @@
     audio_devices_t mDeviceType;
     String8         mDeviceAddress;
     uint32_t        mCbFlags; // flags indicating which callbacks to use, see kCbFlag*
+    sp<IBinder>     mToken;
+    uint32_t        mVirtualDeviceId;
     /** Ignore the AUDIO_FLAG_NO_MEDIA_PROJECTION */
     bool            mAllowPrivilegedMediaPlaybackCapture = false;
     /** Indicates if the caller can capture voice communication output */
diff --git a/media/libaudioclient/include/media/AudioProductStrategy.h b/media/libaudioclient/include/media/AudioProductStrategy.h
index fcbb019..2505b11 100644
--- a/media/libaudioclient/include/media/AudioProductStrategy.h
+++ b/media/libaudioclient/include/media/AudioProductStrategy.h
@@ -58,11 +58,11 @@
      * @return {@code INVALID_SCORE} if not matching, {@code MATCH_ON_DEFAULT_SCORE} if matching
      * to default strategy, non zero positive score if matching a strategy.
      */
-    static int attributesMatchesScore(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes);
+    static int attributesMatchesScore(audio_attributes_t refAttributes,
+                                      audio_attributes_t clientAttritubes);
 
-    static bool attributesMatches(const audio_attributes_t refAttributes,
-                                      const audio_attributes_t clientAttritubes) {
+    static bool attributesMatches(audio_attributes_t refAttributes,
+                                  audio_attributes_t clientAttritubes) {
         return attributesMatchesScore(refAttributes, clientAttritubes) > 0;
     }
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 00f2c7a..d4479ef 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -681,7 +681,7 @@
 
     // Current client state:  false = stopped, true = active.  Protected by mLock.  If more states
     // are added, consider changing this to enum State { ... } mState as in AudioTrack.
-    bool                    mActive;
+    bool mActive = false;
 
     // for client callback handler
 
@@ -708,7 +708,7 @@
     Modulo<uint32_t>        mNewPosition;           // in frames
     uint32_t                mUpdatePeriod;          // in frames, zero means no EVENT_NEW_POS
 
-    status_t                mStatus;
+    status_t mStatus = NO_INIT;
 
     android::content::AttributionSourceState mClientAttributionSource; // Owner's attribution source
 
@@ -736,8 +736,8 @@
                                                     // held to read or write those bits reliably.
     audio_input_flags_t     mOrigFlags;             // as specified in constructor or set(), const
 
-    audio_session_t         mSessionId;
-    audio_port_handle_t     mPortId;                    // Id from Audio Policy Manager
+    audio_session_t mSessionId = AUDIO_SESSION_ALLOCATE;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
 
     /**
      * mLogSessionId is a string identifying this AudioRecord for the metrics service.
@@ -756,9 +756,9 @@
     sp<IMemory>             mBufferMemory;
     audio_io_handle_t       mInput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getInputforAttr()
 
-    int                     mPreviousPriority;  // before start()
-    SchedPolicy             mPreviousSchedulingGroup;
-    bool                    mAwaitBoost;    // thread should wait for priority boost before running
+    int mPreviousPriority = ANDROID_PRIORITY_NORMAL;  // before start()
+    SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
+    bool mAwaitBoost = false;  // thread should wait for priority boost before running
 
     // The proxy should only be referenced while a lock is held because the proxy isn't
     // multi-thread safe.
@@ -799,14 +799,17 @@
 
     // For Device Selection API
     //  a value of AUDIO_PORT_HANDLE_NONE indicated default (AudioPolicyManager) routing.
-    audio_port_handle_t     mSelectedDeviceId; // Device requested by the application.
-    audio_port_handle_t     mRoutedDeviceId;   // Device actually selected by audio policy manager:
-                                              // May not match the app selection depending on other
-                                              // activity and connected devices
+
+    // Device requested by the application.
+    audio_port_handle_t     mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    // Device actually selected by AudioPolicyManager: This may not match the app
+    // selection depending on other activity and connected devices
+    audio_port_handle_t     mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
     wp<AudioSystem::AudioDeviceCallback> mDeviceCallback;
 
-    audio_microphone_direction_t mSelectedMicDirection;
-    float mSelectedMicFieldDimension;
+    audio_microphone_direction_t mSelectedMicDirection = MIC_DIRECTION_UNSPECIFIED;
+    float mSelectedMicFieldDimension = MIC_FIELD_DIMENSION_DEFAULT;
 
     int32_t                    mMaxSharedAudioHistoryMs = 0;
     std::string                mSharedAudioPackageName = {};
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index c238158..5c9a7c6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -479,6 +479,8 @@
 
     static status_t registerPolicyMixes(const Vector<AudioMix>& mixes, bool registration);
 
+    static status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes);
+
     static status_t updatePolicyMixes(
         const std::vector<
                 std::pair<AudioMix, std::vector<AudioMixMatchCriterion>>>& mixesWithUpdates);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 19780ae..3a001a4 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -257,9 +257,7 @@
     /* Constructs an uninitialized AudioTrack. No connection with
      * AudioFlinger takes place.  Use set() after this.
      */
-                        AudioTrack();
-
-                        AudioTrack(const AttributionSourceState& attributionSourceState);
+    explicit AudioTrack(const AttributionSourceState& attributionSourceState = {});
 
     /* Creates an AudioTrack object and registers it with AudioFlinger.
      * Once created, the track needs to be started before it can be used.
@@ -1312,11 +1310,11 @@
     sp<IMemory>             mSharedBuffer;
     transfer_type           mTransfer;
     audio_offload_info_t    mOffloadInfoCopy;
-    audio_attributes_t      mAttributes;
+    audio_attributes_t mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
     size_t                  mFrameSize;             // frame size in bytes
 
-    status_t                mStatus;
+    status_t mStatus = NO_INIT;
 
     // can change dynamically when IAudioTrack invalidated
     uint32_t                mLatency;               // in ms
@@ -1329,7 +1327,7 @@
         STATE_PAUSED_STOPPING,
         STATE_FLUSHED,
         STATE_STOPPING,
-    }                       mState;
+    } mState = STATE_STOPPED;
 
     static constexpr const char *stateToString(State state)
     {
@@ -1459,8 +1457,8 @@
 
     mutable Mutex           mLock;
 
-    int                     mPreviousPriority;          // before start()
-    SchedPolicy             mPreviousSchedulingGroup;
+    int mPreviousPriority = ANDROID_PRIORITY_NORMAL;  // before start()
+    SchedPolicy mPreviousSchedulingGroup = SP_DEFAULT;
     bool                    mAwaitBoost;    // thread should wait for priority boost before running
 
     // The proxy should only be referenced while a lock is held because the proxy isn't
@@ -1472,14 +1470,17 @@
     sp<AudioTrackClientProxy>       mProxy;         // primary owner of the memory
 
     bool                    mInUnderrun;            // whether track is currently in underrun state
-    uint32_t                mPausedPosition;
+    uint32_t mPausedPosition = 0;
 
     // For Device Selection API
     //  a value of AUDIO_PORT_HANDLE_NONE indicates default (AudioPolicyManager) routing.
-    audio_port_handle_t    mSelectedDeviceId; // Device requested by the application.
-    audio_port_handle_t    mRoutedDeviceId;   // Device actually selected by audio policy manager:
-                                              // May not match the app selection depending on other
-                                              // activity and connected devices.
+
+    // Device requested by the application.
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+
+    // Device actually selected by AudioPolicyManager: This may not match the app
+    // selection depending on other activity and connected devices.
+    audio_port_handle_t mRoutedDeviceId = AUDIO_PORT_HANDLE_NONE;
 
     sp<media::VolumeHandler>       mVolumeHandler;
 
@@ -1537,7 +1538,7 @@
         Mutex mAudioTrackCbLock;
         wp<media::IAudioTrackCallback> mCallback;
     };
-    sp<AudioTrackCallback> mAudioTrackCallback;
+    sp<AudioTrackCallback> mAudioTrackCallback = sp<AudioTrackCallback>::make();
 };
 
 }; // namespace android
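
The AudioTrack.h and AudioRecord.h hunks above replace constructor-time assignments with in-class default member initializers, which is what lets the new EmptyAudioTrack/EmptyAudioRecord tests further down call initCheck(), stopped() and stop() on an object that was never set(). A minimal standalone sketch of that pattern, using made-up names and values rather than the real AudioTrack fields:

#include <iostream>

// Standalone sketch (not Android code): with default member initializers,
// every field has a defined value even when no constructor body runs, so a
// default-constructed object can be safely queried or stopped.
class Track {
public:
    bool stopped() const { return mState == State::STOPPED; }
    int initCheck() const { return mStatus; }
    void stop() { mState = State::STOPPED; }
private:
    enum class State { STOPPED, ACTIVE };
    int mStatus = -19;                 // assumed NO_INIT-style sentinel value
    State mState = State::STOPPED;     // mirrors "mState = STATE_STOPPED"
};

int main() {
    Track t;                           // never "set up", still well-defined
    std::cout << t.initCheck() << " " << t.stopped() << "\n";  // prints: -19 1
}
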
diff --git a/media/libaudioclient/include/media/ToneGenerator.h b/media/libaudioclient/include/media/ToneGenerator.h
index 46e9501..3e515fc 100644
--- a/media/libaudioclient/include/media/ToneGenerator.h
+++ b/media/libaudioclient/include/media/ToneGenerator.h
@@ -225,11 +225,14 @@
         TONE_INDIA_CONGESTION,      // Congestion tone: 400 Hz, 250ms ON, 250ms OFF...
         TONE_INDIA_CALL_WAITING,    // Call waiting tone: 400 Hz, tone repeated in a 0.2s on, 0.1s off, 0.2s on, 7.5s off pattern.
         TONE_INDIA_RINGTONE,        // Ring tone: 400 Hz tone modulated with 25Hz, 0.4 on 0.2 off 0.4 on 2.0 off
-         // TAIWAN supervisory tones
+        // TAIWAN supervisory tones
         TONE_TW_RINGTONE,           // Ring Tone: 440 Hz + 480 Hz repeated with pattern 1s on, 3s off.
-         // NEW ZEALAND supervisory tones
+        // NEW ZEALAND supervisory tones
         TONE_NZ_CALL_WAITING,       // Call waiting tone: 400 Hz,  0.2s ON, 3s OFF,
                                     //        0.2s ON, 3s OFF, 0.2s ON, 3s OFF, 0.2s ON
+        // MALAYSIA supervisory tones
+        TONE_MY_CONGESTION,         // Congestion tone: 425 Hz, 500ms ON, 250ms OFF...
+        TONE_MY_RINGTONE,           // Ring tone: 425 Hz, 400ms ON 200ms OFF 400ms ON 2s OFF...
         NUM_ALTERNATE_TONES
     };
 
@@ -244,6 +247,7 @@
         INDIA,
         TAIWAN,
         NZ,
+        MY,
         CEPT,
         NUM_REGIONS
     };
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 8bed4d4..055da5b 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index 0bf2e82..be6c581 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -28,6 +28,25 @@
 
 using namespace android;
 
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioRecordTestBasic, EmptyAudioRecord) {
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = "AudioRecordTest";
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    const auto ar = sp<AudioRecord>::make(attributionSource);
+
+    // test key commands on an unset AudioRecord.
+    EXPECT_EQ(NO_INIT, ar->initCheck());
+    EXPECT_EQ(true, ar->stopped());
+
+    // just don't crash.
+    ar->stop();
+}
+
 class AudioRecordTest : public ::testing::Test {
   public:
     void SetUp() override {
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 8f76f9b..3b2285e 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -17,6 +17,9 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "AudioRoutingTest"
 
+#include <string.h>
+
+#include <binder/Binder.h>
 #include <binder/ProcessState.h>
 #include <cutils/properties.h>
 #include <gtest/gtest.h>
@@ -149,6 +152,7 @@
         config.sample_rate = 48000;
         AudioMix mix(criteria, mixType, config, mixFlag, String8{mAddress.c_str()}, 0);
         mix.mDeviceType = deviceType;
+        mix.mToken = sp<BBinder>::make();
         mMixes.push(mix);
         if (OK == AudioSystem::registerPolicyMixes(mMixes, true)) {
             mPolicyMixRegistered = true;
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index 0282bd7..cb667a0 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -25,6 +25,24 @@
 
 using namespace android;
 
+// Test that the basic constructor returns an object that doesn't crash
+// on stop() or destruction.
+
+TEST(AudioTrackTestBasic, EmptyAudioTrack) {
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = "AudioTrackTest";
+    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(getuid()));
+    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(getpid()));
+    attributionSource.token = sp<BBinder>::make();
+    const auto at = sp<AudioTrack>::make(attributionSource);
+
+    EXPECT_EQ(NO_INIT, at->initCheck());
+    EXPECT_EQ(true, at->stopped());
+
+    // ensure we do not crash.
+    at->stop();
+}
+
 TEST(AudioTrackTest, TestPlayTrack) {
     const auto ap = sp<AudioPlayback>::make(44100 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT,
                                             AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE,
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index c35a60e..87d24a5 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -54,6 +54,7 @@
 #include "effectsAidlConversion/AidlConversionVisualizer.h"
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
@@ -293,6 +294,7 @@
 
 status_t EffectHalAidl::close() {
     TIME_CHECK();
+    mEffect->command(CommandId::STOP);
     return statusTFromBinderStatus(mEffect->close());
 }
 
diff --git a/media/libaudiohal/impl/EffectProxy.cpp b/media/libaudiohal/impl/EffectProxy.cpp
index 9aa02e2..af0f8f2 100644
--- a/media/libaudiohal/impl/EffectProxy.cpp
+++ b/media/libaudiohal/impl/EffectProxy.cpp
@@ -156,6 +156,7 @@
 }
 
 ndk::ScopedAStatus EffectProxy::close() {
+    command(CommandId::STOP);
     return runWithAllSubEffects([&](std::shared_ptr<IEffect>& effect) {
         return effect->close();
     });
diff --git a/media/libaudiohal/tests/Android.bp b/media/libaudiohal/tests/Android.bp
index 3c8e087..f6a7eea 100644
--- a/media/libaudiohal/tests/Android.bp
+++ b/media/libaudiohal/tests/Android.bp
@@ -17,6 +17,7 @@
 // frameworks/av/include.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index 57b860d..f9ae2d4 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -441,10 +441,10 @@
                 track->prepareForAdjustChannels(mFrameCount);
             }
             } break;
-        case HAPTIC_INTENSITY: {
-            const os::HapticScale hapticIntensity = static_cast<os::HapticScale>(valueInt);
-            if (track->mHapticIntensity != hapticIntensity) {
-                track->mHapticIntensity = hapticIntensity;
+        case HAPTIC_SCALE: {
+            const os::HapticScale hapticScale = *reinterpret_cast<os::HapticScale*>(value);
+            if (track->mHapticScale != hapticScale) {
+                track->mHapticScale = hapticScale;
             }
             } break;
         case HAPTIC_MAX_AMPLITUDE: {
@@ -585,7 +585,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticIntensity = os::HapticScale::NONE;
+    t->mHapticScale = {/*level=*/os::HapticLevel::NONE };
     t->mHapticMaxAmplitude = NAN;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
@@ -636,7 +636,7 @@
                 switch (t->mMixerFormat) {
                 // Mixer format should be AUDIO_FORMAT_PCM_FLOAT.
                 case AUDIO_FORMAT_PCM_FLOAT: {
-                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticIntensity,
+                    os::scaleHapticData((float*) buffer, sampleCount, t->mHapticScale,
                                         t->mHapticMaxAmplitude);
                 } break;
                 default:
diff --git a/media/libaudioprocessing/include/media/AudioMixer.h b/media/libaudioprocessing/include/media/AudioMixer.h
index b39fb92..f558fd5 100644
--- a/media/libaudioprocessing/include/media/AudioMixer.h
+++ b/media/libaudioprocessing/include/media/AudioMixer.h
@@ -49,7 +49,7 @@
         DOWNMIX_TYPE    = 0x4004,
         // for haptic
         HAPTIC_ENABLED  = 0x4007, // Set haptic data from this track should be played or not.
-        HAPTIC_INTENSITY = 0x4008, // Set the intensity to play haptic data.
+        HAPTIC_SCALE = 0x4008, // Set the scale to play haptic data.
         HAPTIC_MAX_AMPLITUDE = 0x4009, // Set the max amplitude allowed for haptic data.
         // for target TIMESTRETCH
         PLAYBACK_RATE   = 0x4300, // Configure timestretch on this track name;
@@ -141,7 +141,7 @@
 
         // Haptic
         bool                 mHapticPlaybackEnabled;
-        os::HapticScale      mHapticIntensity;
+        os::HapticScale      mHapticScale;
         float                mHapticMaxAmplitude;
         audio_channel_mask_t mHapticChannelMask;
         uint32_t             mHapticChannelCount;
diff --git a/media/libaudioprocessing/tests/Android.bp b/media/libaudioprocessing/tests/Android.bp
index 48c97ab..ba9b165 100644
--- a/media/libaudioprocessing/tests/Android.bp
+++ b/media/libaudioprocessing/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for libaudioprocessing
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libaudioprocessing/tests/fuzzer/Android.bp b/media/libaudioprocessing/tests/fuzzer/Android.bp
index e4780cf..b96ec6b 100644
--- a/media/libaudioprocessing/tests/fuzzer/Android.bp
+++ b/media/libaudioprocessing/tests/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/downmix/Android.bp b/media/libeffects/downmix/Android.bp
index fda5acc..19b8082 100644
--- a/media/libeffects/downmix/Android.bp
+++ b/media/libeffects/downmix/Android.bp
@@ -1,5 +1,6 @@
 // Multichannel downmix effect library
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_downmix_license",
     ],
diff --git a/media/libeffects/downmix/benchmark/Android.bp b/media/libeffects/downmix/benchmark/Android.bp
index 10f14e2..5b62a0c 100644
--- a/media/libeffects/downmix/benchmark/Android.bp
+++ b/media/libeffects/downmix/benchmark/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
diff --git a/media/libeffects/downmix/tests/Android.bp b/media/libeffects/downmix/tests/Android.bp
index 8cecbe2..77d8f83 100644
--- a/media/libeffects/downmix/tests/Android.bp
+++ b/media/libeffects/downmix/tests/Android.bp
@@ -1,5 +1,6 @@
 // Build testbench for downmix module.
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_downmix_license"
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index 9c440df..ada301b 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -118,26 +118,21 @@
 
 RetCode DynamicsProcessingContext::setPreEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.preEqStage.inUse,
-                                        StageType::PREEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::PREEQ);
 }
 
 RetCode DynamicsProcessingContext::setPostEq(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPEq>(channels, mEngineArchitecture.postEqStage.inUse,
-                                        StageType::POSTEQ);
+    return setDpChannels_l<dp_fx::DPEq>(channels, StageType::POSTEQ);
 }
 
 RetCode DynamicsProcessingContext::setMbc(
         const std::vector<DynamicsProcessing::ChannelConfig>& channels) {
-    return setDpChannels_l<dp_fx::DPMbc>(channels, mEngineArchitecture.mbcStage.inUse,
-                                         StageType::MBC);
+    return setDpChannels_l<dp_fx::DPMbc>(channels, StageType::MBC);
 }
 
 RetCode DynamicsProcessingContext::setPreEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.preEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "preEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.preEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -146,8 +141,6 @@
 
 RetCode DynamicsProcessingContext::setPostEqBand(
         const std::vector<DynamicsProcessing::EqBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.postEqStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "postEqNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.postEqStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -156,8 +149,6 @@
 
 RetCode DynamicsProcessingContext::setMbcBand(
         const std::vector<DynamicsProcessing::MbcBandConfig>& bands) {
-    RETURN_VALUE_IF(!mEngineArchitecture.mbcStage.inUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "mbcNotInUse");
     RETURN_VALUE_IF(
             !validateBandConfig(bands, mChannelCount, mEngineArchitecture.mbcStage.bandCount),
             RetCode::ERROR_ILLEGAL_PARAMETER, "eqBandNotValid");
@@ -166,8 +157,6 @@
 
 RetCode DynamicsProcessingContext::setLimiter(
         const std::vector<DynamicsProcessing::LimiterConfig>& limiters) {
-    RETURN_VALUE_IF(!mEngineArchitecture.limiterInUse, RetCode::ERROR_ILLEGAL_PARAMETER,
-                    "limiterNotInUse");
     RETURN_VALUE_IF(!validateLimiterConfig(limiters, mChannelCount),
                     RetCode::ERROR_ILLEGAL_PARAMETER, "limiterConfigNotValid");
     return setBands_l<DynamicsProcessing::LimiterConfig>(limiters, StageType::LIMITER);
@@ -419,9 +408,7 @@
         }
         freqs[band.band] = band.cutoffFrequencyHz;
     }
-    return std::is_sorted(freqs.begin(), freqs.end(), [](const auto& a, const auto& b) {
-        return a.second <= b.second; //index is already sorted as map key
-    });
+    return true;
 }
 
 bool DynamicsProcessingContext::validateLimiterConfig(
@@ -442,17 +429,10 @@
 
 template <typename D>
 RetCode DynamicsProcessingContext::setDpChannels_l(
-        const std::vector<DynamicsProcessing::ChannelConfig>& channels, bool stageInUse,
-        StageType type) {
+        const std::vector<DynamicsProcessing::ChannelConfig>& channels, StageType type) {
     RetCode ret = RetCode::SUCCESS;
     std::unordered_set<int> channelSet;
 
-    if (!stageInUse) {
-        LOG(WARNING) << __func__ << " not in use " << ::android::internal::ToString(channels);
-        return RetCode::ERROR_ILLEGAL_PARAMETER;
-    }
-
-    RETURN_VALUE_IF(!stageInUse, RetCode::ERROR_ILLEGAL_PARAMETER, "stageNotInUse");
     for (auto& it : channels) {
         if (0 != channelSet.count(it.channel)) {
             LOG(WARNING) << __func__ << " duplicated channel " << it.channel;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index a059dd0..ce657db 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -99,7 +99,7 @@
     dp_fx::DPEq* getEqWithType_l(StageType type, int ch);
     template <typename D>
     RetCode setDpChannels_l(const std::vector<DynamicsProcessing::ChannelConfig>& channels,
-                            bool stageInUse, StageType type);
+                            StageType type);
     template <typename T /* BandConfig */>
     RetCode setBands_l(const std::vector<T>& bands, StageType type);
     RetCode setDpChannelBand_l(const std::any& anyConfig, StageType type,
diff --git a/media/libeffects/factory/Android.bp b/media/libeffects/factory/Android.bp
index 01eb8ee..9be45a5 100644
--- a/media/libeffects/factory/Android.bp
+++ b/media/libeffects/factory/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index a8892d8..f60d616 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -114,10 +114,11 @@
     std::stringstream ss;
     ss << "\t\tHaptic setting:\n";
     ss << "\t\t- tracks intensity map:\n";
-    for (const auto&[id, intensity] : param.id2Intensity) {
-        ss << "\t\t\t- id=" << id << ", intensity=" << (int) intensity;
+    for (const auto&[id, hapticScale] : param.id2HapticScale) {
+        ss << "\t\t\t- id=" << id << ", hapticLevel=" << (int) hapticScale.getLevel()
+           << ", adaptiveScaleFactor=" << hapticScale.getAdaptiveScaleFactor();
     }
-    ss << "\t\t- max intensity: " << (int) param.maxHapticIntensity << '\n';
+    ss << "\t\t- max scale level: " << (int) param.maxHapticScale.getLevel() << '\n';
     ss << "\t\t- max haptic amplitude: " << param.maxHapticAmplitude << '\n';
     return ss.str();
 }
@@ -145,7 +146,7 @@
     memset(context->param.hapticChannelSource, 0, sizeof(context->param.hapticChannelSource));
     context->param.hapticChannelCount = 0;
     context->param.audioChannelCount = 0;
-    context->param.maxHapticIntensity = os::HapticScale::MUTE;
+    context->param.maxHapticScale = os::HapticScale::mute();
 
     context->param.resonantFrequency = DEFAULT_RESONANT_FREQUENCY;
     context->param.bpfQ = 1.0f;
@@ -312,21 +313,25 @@
                                  void *value) {
     switch (param) {
     case HG_PARAM_HAPTIC_INTENSITY: {
-        if (value == nullptr || size != (uint32_t) (2 * sizeof(int))) {
+        if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
             return -EINVAL;
         }
-        int id = *(int *) value;
-        os::HapticScale hapticIntensity = static_cast<os::HapticScale>(*((int *) value + 1));
-        ALOGD("Setting haptic intensity as %d", static_cast<int>(hapticIntensity));
-        if (hapticIntensity == os::HapticScale::MUTE) {
-            context->param.id2Intensity.erase(id);
+        const int id = *(int *) value;
+        const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
+        const float adaptiveScaleFactor = (*((float *) value + 2));
+        const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
+        ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
+              static_cast<int>(hapticLevel), adaptiveScaleFactor);
+        if (hapticScale.isScaleMute()) {
+            context->param.id2HapticScale.erase(id);
         } else {
-            context->param.id2Intensity.emplace(id, hapticIntensity);
+            context->param.id2HapticScale.emplace(id, hapticScale);
         }
-        context->param.maxHapticIntensity = hapticIntensity;
-        for (const auto&[id, intensity] : context->param.id2Intensity) {
-            context->param.maxHapticIntensity = std::max(
-                    context->param.maxHapticIntensity, intensity);
+        context->param.maxHapticScale = hapticScale;
+        for (const auto&[id, scale] : context->param.id2HapticScale) {
+            if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
+                context->param.maxHapticScale = scale;
+            }
         }
         break;
     }
@@ -478,7 +483,7 @@
         return -ENODATA;
     }
 
-    if (context->param.maxHapticIntensity == os::HapticScale::MUTE) {
+    if (context->param.maxHapticScale.isScaleMute()) {
         // Haptic channels are muted, no need to generate haptic data.
         return 0;
     }
@@ -504,8 +509,9 @@
     float* hapticOutBuffer = HapticGenerator_runProcessingChain(
             context->processingChain, context->inputBuffer.data(),
             context->outputBuffer.data(), inBuffer->frameCount);
-    os::scaleHapticData(hapticOutBuffer, hapticSampleCount, context->param.maxHapticIntensity,
-                        context->param.maxHapticAmplitude);
+    os::scaleHapticData(hapticOutBuffer, hapticSampleCount,
+                        context->param.maxHapticScale,
+                        context->param.maxHapticAmplitude);
 
     // For haptic data, the haptic playback thread will copy the data from effect input buffer,
     // which contains haptic data at the end of the buffer, directly to sink buffer.
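
The HG_PARAM_HAPTIC_INTENSITY hunk above grows the parameter payload from two ints to {int id, int level, float adaptiveScaleFactor} and reads the fields back with pointer arithmetic. A standalone sketch of that layout follows (plain C++, not the effect HAL API; memcpy is used here to keep the example free of aliasing concerns):

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
    // The payload is exactly 2 * sizeof(int) + sizeof(float) bytes, matching
    // the size check in the patch.
    uint8_t payload[2 * sizeof(int) + sizeof(float)];
    const int id = 7, level = 2;
    const float adaptiveScaleFactor = 0.5f;
    std::memcpy(payload, &id, sizeof(int));
    std::memcpy(payload + sizeof(int), &level, sizeof(int));
    std::memcpy(payload + 2 * sizeof(int), &adaptiveScaleFactor, sizeof(float));

    // Reading it back, mirroring the layout the new code expects.
    int outId, outLevel;
    float outFactor;
    std::memcpy(&outId, payload, sizeof(int));
    std::memcpy(&outLevel, payload + sizeof(int), sizeof(int));
    std::memcpy(&outFactor, payload + 2 * sizeof(int), sizeof(float));
    std::printf("id=%d level=%d factor=%f\n", outId, outLevel, outFactor);
}
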
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
index 85e961f..dbfc5ea 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.h
@@ -48,9 +48,9 @@
     uint32_t audioChannelCount;
     uint32_t hapticChannelCount;
 
-    // A map from track id to haptic intensity.
-    std::map<int, os::HapticScale> id2Intensity;
-    os::HapticScale maxHapticIntensity; // max intensity will be used to scale haptic data.
+    // A map from track id to haptic scale.
+    std::map<int, os::HapticScale> id2HapticScale;
+    os::HapticScale maxHapticScale; // max haptic scale will be used to scale haptic data.
     float maxHapticAmplitude; // max amplitude will be used to limit haptic data absolute values.
 
     float resonantFrequency;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index e4b0484..0a04250 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -155,7 +155,7 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            static_cast<::android::os::HapticScale>(mParams.mMaxVibratorScale),
+            {/*level=*/static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale) },
             mParams.mVibratorInfo.qFactor);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index c21c5f2..8036983 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index be20684..02b918b 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -1,5 +1,6 @@
 // Music bundle
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_lib_license",
     ],
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 9bb3264..c32e91e 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for effects
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 84b49f2..5b48045 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -1,5 +1,6 @@
 // music bundle wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_lvm_wrapper_license",
     ],
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index ce45a19..44b7d97 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing wrapper
 package {
+    default_team: "trendy_team_media_framework_audio",
     default_applicable_licenses: [
         "frameworks_av_media_libeffects_preprocessing_license",
     ],
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index fbbcab4..ca99bf8 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index d80b135..ad8d84d 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -1,5 +1,6 @@
 // audio preprocessing unit test
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_media_libeffects_preprocessing_license"
diff --git a/media/libeffects/spatializer/benchmarks/Android.bp b/media/libeffects/spatializer/benchmarks/Android.bp
index ab7e468..2d07a9b 100644
--- a/media/libeffects/spatializer/benchmarks/Android.bp
+++ b/media/libeffects/spatializer/benchmarks/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libeffects/spatializer/tests/Android.bp b/media/libeffects/spatializer/tests/Android.bp
index 818e094..ddfcff3 100644
--- a/media/libeffects/spatializer/tests/Android.bp
+++ b/media/libeffects/spatializer/tests/Android.bp
@@ -1,6 +1,7 @@
 // Build the unit tests for spatializer effect
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 9955862..70a242d 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,18 +12,18 @@
     name: "libheadtracking",
     host_supported: true,
     srcs: [
-      "HeadTrackingProcessor.cpp",
-      "ModeSelector.cpp",
-      "Pose.cpp",
-      "PoseBias.cpp",
-      "PoseDriftCompensator.cpp",
-      "PosePredictor.cpp",
-      "PoseRateLimiter.cpp",
-      "QuaternionUtil.cpp",
-      "ScreenHeadFusion.cpp",
-      "StillnessDetector.cpp",
-      "Twist.cpp",
-      "VectorRecorder.cpp",
+        "HeadTrackingProcessor.cpp",
+        "ModeSelector.cpp",
+        "Pose.cpp",
+        "PoseBias.cpp",
+        "PoseDriftCompensator.cpp",
+        "PosePredictor.cpp",
+        "PoseRateLimiter.cpp",
+        "QuaternionUtil.cpp",
+        "ScreenHeadFusion.cpp",
+        "StillnessDetector.cpp",
+        "Twist.cpp",
+        "VectorRecorder.cpp",
     ],
     shared_libs: [
         "libaudioutils",
@@ -51,7 +52,7 @@
 cc_library {
     name: "libheadtracking-binding",
     srcs: [
-      "SensorPoseProvider.cpp",
+        "SensorPoseProvider.cpp",
     ],
     shared_libs: [
         "libbase",
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 085a7e4..ee4075f 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -32,6 +32,7 @@
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/RefBase.h>
+#include <algorithm>
 #include <vector>
 
 HeifDecoder* createHeifDecoder() {
@@ -42,7 +43,10 @@
 
 void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
     info->mWidth = videoFrame->mDisplayWidth;
-    info->mHeight = videoFrame->mDisplayHeight;
+    // Number of scanlines is mDisplayHeight. Clamp it to mHeight to guard
+    // against malformed streams claiming that mDisplayHeight is greater than
+    // mHeight.
+    info->mHeight = std::min(videoFrame->mDisplayHeight, videoFrame->mHeight);
     info->mRotationAngle = videoFrame->mRotationAngle;
     info->mBytesPerPixel = videoFrame->mBytesPerPixel;
     info->mDurationUs = videoFrame->mDurationUs;
@@ -746,7 +750,9 @@
                    (videoFrame->mRowBytes * (mCurScanline + videoFrame->mDisplayTop)) +
                    (videoFrame->mBytesPerPixel * videoFrame->mDisplayLeft);
     mCurScanline++;
-    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mDisplayWidth);
+    // Do not try to copy more than |videoFrame->mWidth| pixels.
+    uint32_t width = std::min(videoFrame->mDisplayWidth, videoFrame->mWidth);
+    memcpy(dst, src, videoFrame->mBytesPerPixel * width);
     return true;
 }
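
Both HeifDecoderImpl.cpp changes above follow the same rule: clamp the display dimensions to the coded dimensions before using them for allocation or copying, so a malformed stream cannot drive memcpy past the frame buffer. A standalone sketch of the idea, with illustrative field names rather than the real VideoFrame layout:

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <vector>

struct Frame {
    uint32_t width = 4, height = 4;   // coded (allocated) dimensions
    uint32_t displayWidth = 100;      // malformed: claims more than 'width'
    uint32_t bytesPerPixel = 4;
};

int main() {
    Frame f;
    std::vector<uint8_t> src(f.width * f.height * f.bytesPerPixel);  // real pixel storage
    std::vector<uint8_t> dst(f.width * f.bytesPerPixel);             // one scanline
    // Clamp before copying: never trust the display width on its own.
    const uint32_t copyWidth = std::min(f.displayWidth, f.width);
    std::memcpy(dst.data(), src.data(), f.bytesPerPixel * copyWidth);
}
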
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 89348a4..3ab32f0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -111,9 +111,12 @@
 // To collect the encoder usage for the battery app
 static void addBatteryData(uint32_t params) {
     sp<IBinder> binder =
-        defaultServiceManager()->getService(String16("media.player"));
+        defaultServiceManager()->waitForService(String16("media.player"));
     sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
-    CHECK(service.get() != NULL);
+    if (service.get() == nullptr) {
+        ALOGE("%s: Failed to get media.player service", __func__);
+        return;
+    }
 
     service->addBatteryData(params);
 }
@@ -1453,29 +1456,44 @@
 }
 
 status_t StagefrightRecorder::setupAACRecording() {
-    // FIXME:
-    // Add support for OUTPUT_FORMAT_AAC_ADIF
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
+    // TODO(b/324512842): Add support for OUTPUT_FORMAT_AAC_ADIF
+    if (mOutputFormat != OUTPUT_FORMAT_AAC_ADTS) {
+        ALOGE("Invalid output format %d used for AAC recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
-    CHECK(mAudioEncoder == AUDIO_ENCODER_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_HE_AAC ||
-          mAudioEncoder == AUDIO_ENCODER_AAC_ELD);
-    CHECK(mAudioSource != AUDIO_SOURCE_CNT);
+    if (mAudioEncoder != AUDIO_ENCODER_AAC
+            && mAudioEncoder != AUDIO_ENCODER_HE_AAC
+            && mAudioEncoder != AUDIO_ENCODER_AAC_ELD) {
+        ALOGE("Invalid encoder %d used for AAC recording", mAudioEncoder);
+        return BAD_VALUE;
+    }
+
+    if (mAudioSource == AUDIO_SOURCE_CNT) {
+        ALOGE("Audio source hasn't been set correctly");
+        return BAD_VALUE;
+    }
 
     mWriter = new AACWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupOggRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_OGG);
+    if (mOutputFormat != OUTPUT_FORMAT_OGG) {
+        ALOGE("Invalid output format %d used for OGG recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     mWriter = new OggWriter(mOutputFd);
     return setupRawAudioRecording();
 }
 
 status_t StagefrightRecorder::setupAMRRecording() {
-    CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
-          mOutputFormat == OUTPUT_FORMAT_AMR_WB);
+    if (mOutputFormat != OUTPUT_FORMAT_AMR_NB
+            && mOutputFormat != OUTPUT_FORMAT_AMR_WB) {
+        ALOGE("Invalid output format %d used for AMR recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if (mOutputFormat == OUTPUT_FORMAT_AMR_NB) {
         if (mAudioEncoder != AUDIO_ENCODER_DEFAULT &&
@@ -1528,7 +1546,10 @@
 }
 
 status_t StagefrightRecorder::setupRTPRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
+    if (mOutputFormat != OUTPUT_FORMAT_RTP_AVP) {
+        ALOGE("Invalid output format %d used for RTP recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     if ((mAudioSource != AUDIO_SOURCE_CNT
                 && mVideoSource != VIDEO_SOURCE_LIST_END)
@@ -1571,7 +1592,10 @@
 }
 
 status_t StagefrightRecorder::setupMPEG2TSRecording() {
-    CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
+    if (mOutputFormat != OUTPUT_FORMAT_MPEG2TS) {
+        ALOGE("Invalid output format %d used for MPEG2TS recording", mOutputFormat);
+        return BAD_VALUE;
+    }
 
     sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
 
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 507da29..74b0a85 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -110,6 +110,17 @@
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
+        "libcameraservice",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V3-ndk",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.4",
+        "libaudiohal@7.0",
     ],
     header_libs: [
         "libaudiohal_headers",
diff --git a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
index fdac1a1..2518c21 100644
--- a/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediarecorder_fuzzer.cpp
@@ -15,22 +15,22 @@
  *
  */
 
-#include <media/stagefright/foundation/AString.h>
-#include "fuzzer/FuzzedDataProvider.h"
-
 #include <AudioFlinger.h>
 #include <MediaPlayerService.h>
 #include <ResourceManagerService.h>
-#include <fakeservicemanager/FakeServiceManager.h>
 #include <StagefrightRecorder.h>
 #include <camera/Camera.h>
 #include <camera/android/hardware/ICamera.h>
+#include <fakeservicemanager/FakeServiceManager.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <media/stagefright/foundation/AString.h>
 #include <mediametricsservice/MediaMetricsService.h>
 #include <thread>
+#include "CameraService.h"
+#include "fuzzer/FuzzedDataProvider.h"
 
 using namespace std;
 using namespace android;
@@ -46,32 +46,27 @@
     AUDIO_SOURCE_VOICE_RECOGNITION, AUDIO_SOURCE_VOICE_COMMUNICATION,
     AUDIO_SOURCE_REMOTE_SUBMIX,     AUDIO_SOURCE_UNPROCESSED,
     AUDIO_SOURCE_VOICE_PERFORMANCE, AUDIO_SOURCE_ECHO_REFERENCE,
-    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD};
+    AUDIO_SOURCE_FM_TUNER,          AUDIO_SOURCE_HOTWORD,
+    AUDIO_SOURCE_ULTRASOUND};
+
+constexpr output_format kOutputFormat[] = {
+        OUTPUT_FORMAT_DEFAULT,        OUTPUT_FORMAT_THREE_GPP,
+        OUTPUT_FORMAT_MPEG_4,         OUTPUT_FORMAT_AUDIO_ONLY_START,
+        OUTPUT_FORMAT_RAW_AMR,        OUTPUT_FORMAT_AMR_NB,
+        OUTPUT_FORMAT_AMR_WB,         OUTPUT_FORMAT_AAC_ADTS,
+        OUTPUT_FORMAT_AUDIO_ONLY_END, OUTPUT_FORMAT_RTP_AVP,
+        OUTPUT_FORMAT_MPEG2TS,        OUTPUT_FORMAT_WEBM,
+        OUTPUT_FORMAT_HEIF,           OUTPUT_FORMAT_OGG,
+        OUTPUT_FORMAT_LIST_END};
+
+constexpr video_encoder kVideoEncoder[] = {
+        VIDEO_ENCODER_DEFAULT,      VIDEO_ENCODER_H263, VIDEO_ENCODER_H264,
+        VIDEO_ENCODER_MPEG_4_SP,    VIDEO_ENCODER_VP8,  VIDEO_ENCODER_HEVC,
+        VIDEO_ENCODER_DOLBY_VISION, VIDEO_ENCODER_AV1,  VIDEO_ENCODER_LIST_END};
 
 constexpr audio_microphone_direction_t kSupportedMicrophoneDirections[] = {
     MIC_DIRECTION_UNSPECIFIED, MIC_DIRECTION_FRONT, MIC_DIRECTION_BACK, MIC_DIRECTION_EXTERNAL};
 
-struct RecordingConfig {
-    output_format outputFormat;
-    audio_encoder audioEncoder;
-    video_encoder videoEncoder;
-};
-
-const struct RecordingConfig kRecordingConfigList[] = {
-    {OUTPUT_FORMAT_AMR_NB, AUDIO_ENCODER_AMR_NB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AMR_WB, AUDIO_ENCODER_AMR_WB, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_HE_AAC, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_AAC_ADTS, AUDIO_ENCODER_AAC_ELD, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_OGG, AUDIO_ENCODER_OPUS, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_RTP_AVP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_DEFAULT},
-    {OUTPUT_FORMAT_MPEG2TS, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_WEBM, AUDIO_ENCODER_VORBIS, VIDEO_ENCODER_VP8},
-    {OUTPUT_FORMAT_THREE_GPP, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_AAC, VIDEO_ENCODER_H264},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_MPEG_4_SP},
-    {OUTPUT_FORMAT_MPEG_4, AUDIO_ENCODER_DEFAULT, VIDEO_ENCODER_HEVC}};
-
 const string kParametersList[] = {"max-duration",
                                   "max-filesize",
                                   "interleave-duration-us",
@@ -104,14 +99,16 @@
                                   "rtp-param-ext-cvo-degrees",
                                   "video-param-request-i-frame",
                                   "rtp-param-set-socket-dscp",
-                                  "rtp-param-set-socket-network"};
+                                  "rtp-param-set-socket-network",
+                                  "rtp-param-set-socket-ecn",
+                                  "rtp-param-remote-ip",
+                                  "log-session-id"};
 
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
 constexpr int32_t kMinVideoSize = 2;
 constexpr int32_t kMaxVideoSize = 8192;
-constexpr int32_t kNumRecordMin = 1;
-constexpr int32_t kNumRecordMax = 10;
+const char kOutputFile[] = "OutputFile";
+const char kNextOutputFile[] = "NextOutputFile";
 
 class TestAudioDeviceCallback : public AudioSystem::AudioDeviceCallback {
    public:
@@ -194,8 +191,7 @@
     int32_t max;
     mStfRecorder->getMaxAmplitude(&max);
 
-    int32_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mStfRecorder->setInputDevice(deviceId);
+    int32_t deviceId;
     mStfRecorder->getRoutedDeviceId(&deviceId);
 
     vector<android::media::MicrophoneInfoFw> activeMicrophones{};
@@ -213,101 +209,189 @@
     sp<IGraphicBufferProducer> buffer = mStfRecorder->querySurfaceMediaSource();
 }
 
-void MediaRecorderClientFuzzer::dumpInfo() {
-    int32_t dumpFd = memfd_create("DumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mStfRecorder->dump(dumpFd, args);
-    close(dumpFd);
-}
-
-void MediaRecorderClientFuzzer::setConfig() {
-    mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
-    mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
-    mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
-    mStfRecorder->setPreferredMicrophoneDirection(
-        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
-    mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool());
-    bool isPrivacySensitive;
-    mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
-    mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize),
-                               mFdp.ConsumeIntegralInRange<int32_t>(kMinVideoSize, kMaxVideoSize));
-    mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>());
-    mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool());
-    mStfRecorder->setPreferredMicrophoneFieldDimension(mFdp.ConsumeFloatingPoint<float>());
-    mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
-
-    int32_t Idx = mFdp.ConsumeIntegralInRange<int32_t>(0, size(kRecordingConfigList) - 1);
-    mStfRecorder->setOutputFormat(kRecordingConfigList[Idx].outputFormat);
-    mStfRecorder->setAudioEncoder(kRecordingConfigList[Idx].audioEncoder);
-    mStfRecorder->setVideoEncoder(kRecordingConfigList[Idx].videoEncoder);
-
-    int32_t nextOutputFd = memfd_create("NextOutputFile", MFD_ALLOW_SEALING);
-    mStfRecorder->setNextOutputFile(nextOutputFd);
-    close(nextOutputFd);
-
-    for (Idx = 0; Idx < size(kParametersList); ++Idx) {
-        if (mFdp.ConsumeBool()) {
-            int32_t value = mFdp.ConsumeIntegral<int32_t>();
-            mStfRecorder->setParameters(
-                String8((kParametersList[Idx] + "=" + to_string(value)).c_str()));
-        }
+template <typename FuncWrapper>
+void callMediaAPI(FuncWrapper funcWrapper, FuzzedDataProvider* fdp) {
+    if (fdp->ConsumeBool()) {
+        funcWrapper();
     }
 }
 
-MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t *data, size_t size)
-    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create("OutputFile", MFD_ALLOW_SEALING)) {
+void MediaRecorderClientFuzzer::setConfig() {
+    callMediaAPI(
+            [this]() {
+                mSurfaceControl = mComposerClient.createSurface(
+                        String8(mFdp.ConsumeRandomLengthString().c_str()) /* name */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* width */,
+                        mFdp.ConsumeIntegral<uint32_t>() /* height */,
+                        mFdp.ConsumeIntegral<int32_t>() /* pixel-format */,
+                        mFdp.ConsumeIntegral<int32_t>() /* flags */);
+                if (mSurfaceControl) {
+                    mSurface = mSurfaceControl->getSurface();
+                    mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
+                }
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setInputDevice(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
+                mStfRecorder->setListener(listener);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestCamera> testCamera = sp<TestCamera>::make();
+                sp<Camera> camera = Camera::create(testCamera);
+                mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
+                mStfRecorder->setInputSurface(persistentSurface);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
+                mStfRecorder->setAudioDeviceCallback(callback);
+                mStfRecorder->setOutputFile(mMediaRecorderOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setAudioSource(mFdp.PickValueInArray(kSupportedAudioSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSource(mFdp.PickValueInArray(kSupportedVideoSources));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneDirection(
+                        mFdp.PickValueInArray(kSupportedMicrophoneDirections));
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setPrivacySensitive(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                bool isPrivacySensitive;
+                mStfRecorder->isPrivacySensitive(&isPrivacySensitive);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setVideoSize(mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* width */,
+                                           mFdp.ConsumeIntegralInRange<int32_t>(
+                                                   kMinVideoSize, kMaxVideoSize) /* height */);
+            },
+            &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->setVideoFrameRate(mFdp.ConsumeIntegral<int32_t>()); },
+                 &mFdp);
+
+    callMediaAPI([this]() { mStfRecorder->enableAudioDeviceCallback(mFdp.ConsumeBool()); }, &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setPreferredMicrophoneFieldDimension(
+                        mFdp.ConsumeFloatingPoint<float>());
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                mStfRecorder->setClientName(String16(mFdp.ConsumeRandomLengthString().c_str()));
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                output_format OutputFormat = mFdp.PickValueInArray(kOutputFormat);
+                audio_encoder AudioEncoderFormat =
+                        (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(AUDIO_ENCODER_DEFAULT,
+                                                                            AUDIO_ENCODER_LIST_END);
+                video_encoder VideoEncoderFormat = mFdp.PickValueInArray(kVideoEncoder);
+                if (OutputFormat == OUTPUT_FORMAT_AMR_NB) {
+                    AudioEncoderFormat =
+                            mFdp.ConsumeBool() ? AUDIO_ENCODER_DEFAULT : AUDIO_ENCODER_AMR_NB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AMR_WB) {
+                    AudioEncoderFormat = AUDIO_ENCODER_AMR_WB;
+                } else if (OutputFormat == OUTPUT_FORMAT_AAC_ADIF ||
+                           OutputFormat == OUTPUT_FORMAT_AAC_ADTS ||
+                           OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                    AudioEncoderFormat = (audio_encoder)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_ENCODER_AAC, AUDIO_ENCODER_AAC_ELD);
+                    if (OutputFormat == OUTPUT_FORMAT_MPEG2TS) {
+                        VideoEncoderFormat = VIDEO_ENCODER_H264;
+                    }
+                }
+                mStfRecorder->setOutputFormat(OutputFormat);
+                mStfRecorder->setAudioEncoder(AudioEncoderFormat);
+                mStfRecorder->setVideoEncoder(VideoEncoderFormat);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                int32_t nextOutputFd = memfd_create(kNextOutputFile, MFD_ALLOW_SEALING);
+                mStfRecorder->setNextOutputFile(nextOutputFd);
+                close(nextOutputFd);
+            },
+            &mFdp);
+
+    callMediaAPI(
+            [this]() {
+                for (int32_t idx = 0; idx < size(kParametersList); ++idx) {
+                    if (mFdp.ConsumeBool()) {
+                        int32_t value = mFdp.ConsumeIntegral<int32_t>();
+                        mStfRecorder->setParameters(
+                                String8((kParametersList[idx] + "=" + to_string(value)).c_str()));
+                    }
+                }
+            },
+            &mFdp);
+}
+
+MediaRecorderClientFuzzer::MediaRecorderClientFuzzer(const uint8_t* data, size_t size)
+    : mFdp(data, size), mMediaRecorderOutputFd(memfd_create(kOutputFile, MFD_ALLOW_SEALING)) {
     AttributionSourceState attributionSource;
     attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
     attributionSource.token = sp<BBinder>::make();
     mStfRecorder = make_unique<StagefrightRecorder>(attributionSource);
-
-    mSurfaceControl = mComposerClient.createSurface(
-        String8(mFdp.ConsumeRandomLengthString().c_str()), mFdp.ConsumeIntegral<uint32_t>(),
-        mFdp.ConsumeIntegral<uint32_t>(), mFdp.ConsumeIntegral<int32_t>(),
-        mFdp.ConsumeIntegral<int32_t>());
-    if (mSurfaceControl) {
-        mSurface = mSurfaceControl->getSurface();
-        mStfRecorder->setPreviewSurface(mSurface->getIGraphicBufferProducer());
-    }
-
-    sp<TestMediaRecorderClient> listener = sp<TestMediaRecorderClient>::make();
-    mStfRecorder->setListener(listener);
-
-    sp<TestCamera> testCamera = sp<TestCamera>::make();
-    sp<Camera> camera = Camera::create(testCamera);
-    mStfRecorder->setCamera(camera->remote(), camera->getRecordingProxy());
-
-    sp<PersistentSurface> persistentSurface = sp<PersistentSurface>::make();
-    mStfRecorder->setInputSurface(persistentSurface);
-
-    sp<TestAudioDeviceCallback> callback = sp<TestAudioDeviceCallback>::make();
-    mStfRecorder->setAudioDeviceCallback(callback);
 }
 
 void MediaRecorderClientFuzzer::process() {
-    setConfig();
-
     mStfRecorder->init();
     mStfRecorder->prepare();
-    size_t numRecord = mFdp.ConsumeIntegralInRange<size_t>(kNumRecordMin, kNumRecordMax);
-    for (size_t Idx = 0; Idx < numRecord; ++Idx) {
-        mStfRecorder->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->resume();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mStfRecorder->stop();
+    while (mFdp.remaining_bytes()) {
+        auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { setConfig(); },
+                [&]() { mStfRecorder->start(); },
+                [&]() { mStfRecorder->pause(); },
+                [&]() { mStfRecorder->resume(); },
+                [&]() { mStfRecorder->stop(); },
+                [&]() { getConfig(); },
+                [&]() { mStfRecorder->close(); },
+                [&]() { mStfRecorder->reset(); },
+        });
+        invokeMediaPlayerApi();
     }
-    dumpInfo();
-    getConfig();
-
-    mStfRecorder->close();
-    mStfRecorder->reset();
 }
 
 extern "C" int LLVMFuzzerInitialize(int /* *argc */, char /* ***argv */) {
@@ -320,6 +404,7 @@
     MediaPlayerService::instantiate();
     AudioFlinger::instantiate();
     ResourceManagerService::instantiate();
+    CameraService::instantiate();
     fakeServiceManager->addService(String16(MediaMetricsService::kServiceName),
                                     new MediaMetricsService());
     return 0;
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index 6e48078..486a34f 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 19f9549..6ea40e3 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -1,4 +1,3 @@
-
 package {
     default_applicable_licenses: ["frameworks_av_media_mediaserver_license"],
 }
diff --git a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
index e8fea73..fb9f1e9 100644
--- a/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
+++ b/media/module/codecs/mp3dec/src/pvmp3_framedecoder.cpp
@@ -310,26 +310,31 @@
 }
 
 // Check if the input is valid by checking if it contains a sync word
-static bool isInputValid(uint8 *buf, uint32 inSize)
+static ERROR_CODE validate_input(uint8 *buf, uint32 inSize)
 {
-    // Buffer needs to contain at least 4 bytes which is the size of
-    // the header
-    if (inSize < 4) return false;
+    /*
+     * Verify that at least the header is complete.
+     * Note that SYNC_WORD_LNGTH is in bits, while inSize is in bytes.
+     */
+    if (inSize < ((SYNC_WORD_LNGTH + 21) >> 3))
+    {
+        return NO_ENOUGH_MAIN_DATA_ERROR;
+    }
 
     size_t totalInSize = 0;
     size_t frameSize = 0;
     while (totalInSize <= (inSize - 4)) {
         if (!parseHeader(U32_AT(buf + totalInSize), &frameSize)) {
-            return false;
+            return SYNCH_LOST_ERROR;
         }
         // Buffer needs to be large enough to include complete frame
         if ((frameSize > inSize) || (totalInSize > (inSize - frameSize))) {
-            return false;
+            return SYNCH_LOST_ERROR;
         }
         totalInSize += frameSize;
     }
 
-    return true;
+    return NO_DECODING_ERROR;
 }
 
 ERROR_CODE pvmp3_framedecoder(tPVMP3DecoderExternal *pExt,
@@ -348,10 +353,11 @@
     mp3Header info_data;
     mp3Header *info = &info_data;
 
-    if (!isInputValid(pExt->pInputBuffer, pExt->inputBufferCurrentLength))
+    errorCode = validate_input(pExt->pInputBuffer, pExt->inputBufferCurrentLength);
+    if (errorCode != NO_DECODING_ERROR)
     {
         pExt->outputFrameSize = 0;
-        return SYNCH_LOST_ERROR;
+        return errorCode;
     }
 
     pVars->inputStream.pBuffer  = pExt->pInputBuffer;
diff --git a/media/module/esds/tests/ESDSTest.cpp b/media/module/esds/tests/ESDSTest.cpp
index 33bdcac..ba64f60 100644
--- a/media/module/esds/tests/ESDSTest.cpp
+++ b/media/module/esds/tests/ESDSTest.cpp
@@ -52,7 +52,7 @@
                              /* BitrateMax */ int32_t,
                              /* BitrateAvg */ int32_t>> {
   public:
-    ESDSUnitTest() : mESDSData(nullptr) {
+    ESDSUnitTest() {
         mESDSParams.inputFile = get<0>(GetParam());
         mESDSParams.objectTypeIndication = get<1>(GetParam());
         mESDSParams.codecSpecificInfoData = get<2>(GetParam());
@@ -61,6 +61,13 @@
         mESDSParams.bitrateAvg = get<5>(GetParam());
     };
 
+    ~ESDSUnitTest() {
+        if (mESDSData != nullptr) {
+            free(mESDSData);
+            mESDSData = nullptr;
+        }
+    }
+
     virtual void TearDown() override {
         if (mDataSource) mDataSource.clear();
         if (mInputFp) {
@@ -70,8 +77,8 @@
     }
 
     virtual void SetUp() override { ASSERT_NO_FATAL_FAILURE(readESDSData()); }
-    const void *mESDSData;
-    size_t mESDSSize;
+    void *mESDSData = nullptr;
+    size_t mESDSSize = 0;
     ESDSParams mESDSParams;
 
   private:
@@ -105,10 +112,19 @@
     bool esdsDataPresent(size_t numTracks, sp<IMediaExtractor> extractor) {
         bool foundESDS = false;
         uint32_t type;
+        if (mESDSData != nullptr) {
+            free(mESDSData);
+            mESDSData = nullptr;
+        }
         for (size_t i = 0; i < numTracks; ++i) {
             sp<MetaData> trackMeta = extractor->getTrackMetaData(i);
+            const void *esdsData = nullptr;
+            size_t esdsSize = 0;
             if (trackMeta != nullptr &&
-                trackMeta->findData(kKeyESDS, &type, &mESDSData, &mESDSSize)) {
+                trackMeta->findData(kKeyESDS, &type, &esdsData, &esdsSize)) {
+                mESDSData = malloc(esdsSize);
+                mESDSSize = esdsSize;
+                memcpy(mESDSData, esdsData, esdsSize);
                 trackMeta->clear();
                 foundESDS = true;
                 break;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index b85d9de..4b0192a 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -28,7 +28,6 @@
 #include <media/AidlConversionUtil.h>
 #include <android/content/AttributionSourceState.h>
 
-#include <com_android_media_audio.h>
 #include <iterator>
 #include <algorithm>
 #include <pwd.h>
@@ -87,7 +86,7 @@
 }
 
 std::optional<AttributionSourceState> resolveAttributionSource(
-        const AttributionSourceState& callerAttributionSource) {
+        const AttributionSourceState& callerAttributionSource, const uint32_t virtualDeviceId) {
     AttributionSourceState nextAttributionSource = callerAttributionSource;
 
     if (!nextAttributionSource.packageName.has_value()) {
@@ -102,6 +101,7 @@
             return std::nullopt;
         }
     }
+    nextAttributionSource.deviceId = virtualDeviceId;
 
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
@@ -110,13 +110,15 @@
     // audioserver to the app ops system
     static sp<BBinder> appOpsToken = sp<BBinder>::make();
     myAttributionSource.token = appOpsToken;
+    myAttributionSource.deviceId = virtualDeviceId;
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
 }
 
-static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
-        const String16& msg, bool start, audio_source_t source) {
+static bool checkRecordingInternal(const AttributionSourceState& attributionSource,
+                                   const uint32_t virtualDeviceId,
+                                   const String16& msg, bool start, audio_source_t source) {
     // Okay to not track in app ops as audio server or media server is us and if
     // device is rooted security model is considered compromised.
     // system_server loses its RECORD_AUDIO permission when a secondary
@@ -128,8 +130,8 @@
     // We specify a pid and uid here as mediaserver (aka MediaRecorder or StageFrightRecorder)
     // may open a record track on behalf of a client. Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
-    const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
+    std::optional<AttributionSourceState> resolvedAttributionSource =
+            resolveAttributionSource(attributionSource, virtualDeviceId);
     if (!resolvedAttributionSource.has_value()) {
         return false;
     }
@@ -151,16 +153,30 @@
     return permitted;
 }
 
-bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
-    return checkRecordingInternal(attributionSource, String16(), /*start*/ false, source);
+static constexpr int DEVICE_ID_DEFAULT = 0;
+
+bool recordingAllowed(const AttributionSourceState& attributionSource, audio_source_t source) {
+    return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, String16(), /*start*/ false,
+                                  source);
 }
 
-bool startRecording(const AttributionSourceState& attributionSource, const String16& msg,
-        audio_source_t source) {
-    return checkRecordingInternal(attributionSource, msg, /*start*/ true, source);
+bool recordingAllowed(const AttributionSourceState& attributionSource,
+                      const uint32_t virtualDeviceId,
+                      audio_source_t source) {
+    return checkRecordingInternal(attributionSource, virtualDeviceId,
+                                  String16(), /*start*/ false, source);
 }
 
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source) {
+bool startRecording(const AttributionSourceState& attributionSource,
+                    const uint32_t virtualDeviceId,
+                    const String16& msg,
+                    audio_source_t source) {
+    return checkRecordingInternal(attributionSource, virtualDeviceId, msg, /*start*/ true,
+                                  source);
+}
+
+void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                     audio_source_t source) {
     // Okay to not track in app ops as audio server is us and if
     // device is rooted security model is considered compromised.
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -170,7 +186,7 @@
     // may open a record track on behalf of a client. Note that pid may be a tid.
     // IMPORTANT: DON'T USE PermissionCache - RUNTIME PERMISSIONS CHANGE.
     const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
+            resolveAttributionSource(attributionSource, virtualDeviceId);
     if (!resolvedAttributionSource.has_value()) {
         return;
     }
@@ -389,16 +405,12 @@
  */
 bool mustAnonymizeBluetoothAddress(
         const AttributionSourceState& attributionSource, const String16& caller) {
-    if (!com::android::media::audio::bluetooth_mac_address_anonymization()) {
-        return false;
-    }
-
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     if (isAudioServerOrSystemServerUid(uid)) {
         return false;
     }
     const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource);
+            resolveAttributionSource(attributionSource, DEVICE_ID_DEFAULT);
     if (!resolvedAttributionSource.has_value()) {
         return true;
     }
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index bd9a462..0a047c1 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
index 15f043a..449e7de 100644
--- a/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
+++ b/media/utils/fuzzers/ServiceUtilitiesFuzz.cpp
@@ -53,6 +53,7 @@
     int32_t pid = data_provider.ConsumeIntegral<int32_t>();
     audio_source_t source = static_cast<audio_source_t>(data_provider
         .ConsumeIntegral<std::underlying_type_t<audio_source_t>>());
+    uint32_t deviceId = data_provider.ConsumeIntegral<uint32_t>();
 
     std::string packageNameStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
     std::string msgStr = data_provider.ConsumeRandomLengthString(kMaxStringLen);
@@ -70,8 +71,9 @@
     android::isAudioServerOrSystemServerUid(uid);
     android::isAudioServerOrMediaServerUid(uid);
     android::recordingAllowed(attributionSource);
-    android::startRecording(attributionSource, msgStr16, source);
-    android::finishRecording(attributionSource, source);
+    android::recordingAllowed(attributionSource, deviceId, source);
+    android::startRecording(attributionSource, deviceId, msgStr16, source);
+    android::finishRecording(attributionSource, deviceId, source);
     android::captureAudioOutputAllowed(attributionSource);
     android::captureMediaOutputAllowed(attributionSource);
     android::captureHotwordAllowed(attributionSource);
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 0b3a3f9..9c02cd4 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -87,11 +87,16 @@
 
 bool recordingAllowed(const AttributionSourceState& attributionSource,
         audio_source_t source = AUDIO_SOURCE_DEFAULT);
-bool startRecording(const AttributionSourceState& attributionSource,
-    const String16& msg, audio_source_t source);
-void finishRecording(const AttributionSourceState& attributionSource, audio_source_t source);
+
+bool recordingAllowed(const AttributionSourceState& attributionSource,
+                      uint32_t virtualDeviceId,
+                      audio_source_t source);
+bool startRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                    const String16& msg, audio_source_t source);
+void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+                     audio_source_t source);
 std::optional<AttributionSourceState> resolveAttributionSource(
-    const AttributionSourceState& callerAttributionSource);
+    const AttributionSourceState& callerAttributionSource, uint32_t virtualDeviceId);
 bool captureAudioOutputAllowed(const AttributionSourceState& attributionSource);
 bool captureMediaOutputAllowed(const AttributionSourceState& attributionSource);
 bool captureTunerAudioInputAllowed(const AttributionSourceState& attributionSource);
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 3fdc6eb..a68569a 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 34395d8..9016420 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -145,6 +145,7 @@
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "av-types-aidl-cpp",
+        "com.android.media.audio-aconfig-cc",
         "effect-aidl-cpp",
         "libactivitymanager_aidl",
         "libaudioclient",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 97c80a8..d5d778f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -311,7 +311,8 @@
     }
 
     mPatchPanel = IAfPatchPanel::create(sp<IAfPatchPanelCallback>::fromExisting(this));
-    mMelReporter = sp<MelReporter>::make(sp<IAfMelReporterCallback>::fromExisting(this));
+    mMelReporter = sp<MelReporter>::make(sp<IAfMelReporterCallback>::fromExisting(this),
+                                         mPatchPanel);
 }
 
 status_t AudioFlinger::setAudioHalPids(const std::vector<pid_t>& pids) {
@@ -1078,6 +1079,7 @@
         client = registerPid(clientPid);
 
         IAfPlaybackThread* effectThread = nullptr;
+        sp<IAfEffectChain> effectChain = nullptr;
         // check if an effect chain with the same session ID is present on another
         // output thread and move it here.
         for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -1090,6 +1092,10 @@
                 }
             }
         }
+        // Check if an orphan effect chain exists for this session
+        if (effectThread == nullptr) {
+            effectChain = getOrphanEffectChain_l(sessionId);
+        }
         ALOGV("createTrack() sessionId: %d", sessionId);
 
         output.sampleRate = input.config.sample_rate;
@@ -1134,6 +1140,13 @@
                     effectIds = thread->getEffectIds_l(sessionId);
                 }
             }
+            if (effectChain != nullptr) {
+                if (moveEffectChain_ll(sessionId, nullptr, thread, effectChain.get())
+                        == NO_ERROR) {
+                    effectThreadId = thread->id();
+                    effectIds = thread->getEffectIds_l(sessionId);
+                }
+            }
         }
 
         // Look for sync events awaiting for a session to be used.
@@ -2175,30 +2188,24 @@
     sp<IAfThreadBase> thread;
 
     for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
-        if (mPlaybackThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mPlaybackThreads.valueAt(i);
+        thread = mPlaybackThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
-        if (mRecordThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mRecordThreads.valueAt(i);
+        thread = mRecordThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    if (thread != nullptr) {
-        return thread;
-    }
     for (size_t i = 0; i < mMmapThreads.size(); i++) {
-        if (mMmapThreads.valueAt(i)->getEffect(sessionId, effectId) != 0) {
-            ALOG_ASSERT(thread == 0);
-            thread = mMmapThreads.valueAt(i);
+        thread = mMmapThreads.valueAt(i);
+        if (thread->getEffect(sessionId, effectId) != 0) {
+            return thread;
         }
     }
-    return thread;
+    return nullptr;
 }
 
 // ----------------------------------------------------------------------------
@@ -3064,6 +3071,25 @@
 
 
             mPlaybackThreads.removeItem(output);
+            // Save the AUDIO_SESSION_OUTPUT_MIX effect chain to the orphan chains.
+            // The output mix session is used by AudioPolicyManager to manage music effects.
+            // It exists across all playback threads.
+            if (playbackThread->type() == IAfThreadBase::MIXER
+                    || playbackThread->type() == IAfThreadBase::OFFLOAD
+                    || playbackThread->type() == IAfThreadBase::SPATIALIZER) {
+                sp<IAfEffectChain> mixChain;
+                {
+                    audio_utils::scoped_lock sl(playbackThread->mutex());
+                    mixChain = playbackThread->getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
+                    if (mixChain != nullptr) {
+                        ALOGW("%s() output %d moving mix session to orphans", __func__, output);
+                        playbackThread->removeEffectChain_l(mixChain);
+                    }
+                }
+                if (mixChain != nullptr) {
+                    putOrphanEffectChain_l(mixChain);
+                }
+            }
             // save all effects to the default thread
             if (mPlaybackThreads.size()) {
                 IAfPlaybackThread* const dstThread =
@@ -4150,7 +4176,7 @@
         }
 
         // Only audio policy service can create a spatializer effect
-        if ((memcmp(&descOut.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0) &&
+        if (IAfEffectModule::isSpatializer(&descOut.type) &&
             (callingUid != AID_AUDIOSERVER || currentPid != getpid())) {
             ALOGW("%s: attempt to create a spatializer effect from uid/pid %d/%d",
                     __func__, callingUid, currentPid);
@@ -4208,7 +4234,9 @@
             // before creating the AudioEffect or the io handle must be specified.
             //
             // Detect if the effect is created after an AudioRecord is destroyed.
-            if (getOrphanEffectChain_l(sessionId).get() != nullptr) {
+            if (sessionId != AUDIO_SESSION_OUTPUT_MIX
+                  && ((descOut.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)
+                  && getOrphanEffectChain_l(sessionId).get() != nullptr) {
                 ALOGE("%s: effect %s with no specified io handle is denied because the AudioRecord"
                       " for session %d no longer exists",
                       __func__, descOut.name, sessionId);
@@ -4219,11 +4247,27 @@
             // Legacy handling of creating an effect on an expired or made-up
             // session id.  We think that it is a Playback effect.
             //
-            // If no output thread contains the requested session ID, default to
-            // first output. The effect chain will be moved to the correct output
-            // thread when a track with the same session ID is created
-            if (io == AUDIO_IO_HANDLE_NONE && mPlaybackThreads.size() > 0) {
-                io = mPlaybackThreads.keyAt(0);
+            // If no output thread contains the requested session ID, park the effect in
+            // the orphan chains. The effect chain will be moved to the correct output
+            // thread when a track with the same session ID is created.
+            if (io == AUDIO_IO_HANDLE_NONE) {
+                if (probe) {
+                    // In probe mode, no compatible thread was found, so exit with an error.
+                    lStatus = BAD_VALUE;
+                    goto Exit;
+                }
+                ALOGV("%s() got io %d for effect %s", __func__, io, descOut.name);
+                sp<Client> client = registerPid(currentPid);
+                bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+                handle = createOrphanEffect_l(client, effectClient, priority, sessionId,
+                                              &descOut, &enabledOut, &lStatus, pinned,
+                                              request.notifyFramesProcessed);
+                if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+                    // remove local strong reference to Client with clientMutex() held
+                    audio_utils::lock_guard _cl(clientMutex());
+                    client.clear();
+                }
+                goto Register;
             }
             ALOGV("createEffect() got io %d for effect %s", io, descOut.name);
         } else if (checkPlaybackThread_l(io) != nullptr
@@ -4264,7 +4308,8 @@
                     goto Exit;
                 }
             }
-        } else {
+        }
+        if (thread->type() == IAfThreadBase::RECORD || sessionId == AUDIO_SESSION_OUTPUT_MIX) {
             // Check if one effect chain was awaiting for an effect to be created on this
             // session and used it instead of creating a new one.
             sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
@@ -4340,6 +4385,85 @@
     return lStatus;
 }
 
+sp<IAfEffectHandle> AudioFlinger::createOrphanEffect_l(
+        const sp<Client>& client,
+        const sp<IEffectClient>& effectClient,
+        int32_t priority,
+        audio_session_t sessionId,
+        effect_descriptor_t *desc,
+        int *enabled,
+        status_t *status,
+        bool pinned,
+        bool notifyFramesProcessed)
+{
+    ALOGV("%s effectClient %p, priority %d, sessionId %d, factory %p",
+          __func__, effectClient.get(), priority, sessionId, mEffectsFactoryHal.get());
+
+    // Check if an orphan effect chain exists for this session; otherwise create a new one.
+    sp<IAfEffectModule> effect;
+    sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
+    bool chainCreated = false;
+    if (chain == nullptr) {
+        chain = IAfEffectChain::create(/* ThreadBase= */ nullptr, sessionId, this);
+        chainCreated = true;
+    } else {
+        effect = chain->getEffectFromDesc(desc);
+    }
+    bool effectCreated = false;
+    if (effect == nullptr) {
+        audio_unique_id_t effectId = nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
+        // create a new effect module if none present in the chain
+        status_t llStatus =
+                chain->createEffect(effect, desc, effectId, sessionId, pinned);
+        if (llStatus != NO_ERROR) {
+            *status = llStatus;
+            // if the effect chain was not created here, put it back
+            if (!chainCreated) {
+                putOrphanEffectChain_l(chain);
+            }
+            return nullptr;
+        }
+        effect->setMode(getMode());
+
+        if (effect->isHapticGenerator()) {
+            // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
+            // for the HapticGenerator.
+            const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
+                    std::move(getDefaultVibratorInfo_l());
+            if (defaultVibratorInfo) {
+                // Only set the vibrator info when it is a valid one.
+                audio_utils::lock_guard _cl(chain->mutex());
+                effect->setVibratorInfo_l(*defaultVibratorInfo);
+            }
+        }
+        effectCreated = true;
+    }
+    // create effect handle and connect it to effect module
+    sp<IAfEffectHandle> handle =
+            IAfEffectHandle::create(effect, client, effectClient, priority, notifyFramesProcessed);
+    status_t lStatus = handle->initCheck();
+    if (lStatus == OK) {
+        lStatus = effect->addHandle(handle.get());
+    }
+    // On lStatus error the EffectHandle is still returned and the caller must clear it.
+    if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+        if (effectCreated) {
+            chain->removeEffect(effect);
+        }
+        // if the effect chain was not created here, put it back
+        if (!chainCreated) {
+            putOrphanEffectChain_l(chain);
+        }
+    } else {
+        if (enabled != NULL) {
+            *enabled = (int)effect->isEnabled();
+        }
+        putOrphanEffectChain_l(chain);
+    }
+    *status = lStatus;
+    return handle;
+}
+
 status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
         audio_io_handle_t dstIo)
 NO_THREAD_SAFETY_ANALYSIS
@@ -4368,17 +4492,39 @@
         }
         return ret;
     }
-    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcIo);
-    if (srcThread == nullptr) {
-        ALOGW("%s() bad srcIo %d", __func__, srcIo);
-        return BAD_VALUE;
-    }
-    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstIo);
+
+    IAfPlaybackThread* dstThread = checkPlaybackThread_l(dstIo);
     if (dstThread == nullptr) {
         ALOGW("%s() bad dstIo %d", __func__, dstIo);
         return BAD_VALUE;
     }
 
+    IAfPlaybackThread* srcThread = checkPlaybackThread_l(srcIo);
+    sp<IAfEffectChain> orphanChain = getOrphanEffectChain_l(sessionId);
+    if (srcThread == nullptr && orphanChain == nullptr && sessionId == AUDIO_SESSION_OUTPUT_MIX) {
+        ALOGW("%s() AUDIO_SESSION_OUTPUT_MIX not found in orphans, checking other mix", __func__);
+        for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
+            const sp<IAfPlaybackThread> pt = mPlaybackThreads.valueAt(i);
+            const uint32_t sessionType = pt->hasAudioSession(AUDIO_SESSION_OUTPUT_MIX);
+            if ((pt->type() == IAfThreadBase::MIXER || pt->type() == IAfThreadBase::OFFLOAD) &&
+                    ((sessionType & IAfThreadBase::EFFECT_SESSION) != 0)) {
+                srcThread = pt.get();
+                ALOGW("%s() found srcOutput %d hosting AUDIO_SESSION_OUTPUT_MIX", __func__,
+                      pt->id());
+                break;
+            }
+        }
+    }
+    if (srcThread == nullptr && orphanChain == nullptr) {
+        ALOGW("moveEffects() bad srcIo %d", srcIo);
+        return BAD_VALUE;
+    }
+    // dstThread pointer validity has already been checked
+    if (orphanChain != nullptr) {
+        audio_utils::scoped_lock _ll(dstThread->mutex());
+        return moveEffectChain_ll(sessionId, nullptr, dstThread, orphanChain.get());
+    }
+    // srcThread pointer validity has already been checked
     audio_utils::scoped_lock _ll(dstThread->mutex(), srcThread->mutex());
     return moveEffectChain_ll(sessionId, srcThread, dstThread);
 }
@@ -4392,23 +4538,29 @@
 
     sp<IAfThreadBase> thread = getEffectThread_l(sessionId, effectId);
     if (thread == nullptr) {
-      return;
+        return;
     }
     audio_utils::lock_guard _sl(thread->mutex());
-    sp<IAfEffectModule> effect = thread->getEffect_l(sessionId, effectId);
-    thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    if (const auto& effect = thread->getEffect_l(sessionId, effectId)) {
+        thread->setEffectSuspended_l(&effect->desc().type, suspended, sessionId);
+    }
 }
 
 
 // moveEffectChain_ll must be called with the AudioFlinger::mutex()
 // and both srcThread and dstThread mutex()s held
 status_t AudioFlinger::moveEffectChain_ll(audio_session_t sessionId,
-        IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
+        IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+        IAfEffectChain* srcChain)
 {
-    ALOGV("%s: session %d from thread %p to thread %p",
-            __func__, sessionId, srcThread, dstThread);
+    ALOGV("%s: session %d from thread %p to thread %p %s",
+            __func__, sessionId, srcThread, dstThread,
+            (srcChain != nullptr ? "from specific chain" : ""));
+    ALOG_ASSERT((srcThread != nullptr) != (srcChain != nullptr),
+                "no source provided for source chain");
 
-    sp<IAfEffectChain> chain = srcThread->getEffectChain_l(sessionId);
+    sp<IAfEffectChain> chain =
+          srcChain != nullptr ? srcChain : srcThread->getEffectChain_l(sessionId);
     if (chain == 0) {
         ALOGW("%s: effect chain for session %d not on source thread %p",
                 __func__, sessionId, srcThread);
@@ -4428,8 +4580,9 @@
     // otherwise unnecessary as removeEffect_l() will remove the chain when last effect is
     // removed.
     // TODO(b/216875016): consider holding the effect chain locks for the duration of the move.
-    srcThread->removeEffectChain_l(chain);
-
+    if (srcThread != nullptr) {
+        srcThread->removeEffectChain_l(chain);
+    }
     // transfer all effects one by one so that new effect chain is created on new thread with
     // correct buffer sizes and audio parameters and effect engines reconfigured accordingly
     sp<IAfEffectChain> dstChain;
@@ -4439,7 +4592,11 @@
     // process effects one by one.
     for (sp<IAfEffectModule> effect = chain->getEffectFromId_l(0); effect != nullptr;
             effect = chain->getEffectFromId_l(0)) {
-        srcThread->removeEffect_l(effect);
+        if (srcThread != nullptr) {
+            srcThread->removeEffect_l(effect);
+        } else {
+            chain->removeEffect(effect);
+        }
         removed.add(effect);
         status = dstThread->addEffect_ll(effect);
         if (status != NO_ERROR) {
@@ -4467,7 +4624,7 @@
         for (const auto& effect : removed) {
             dstThread->removeEffect_l(effect); // Note: Depending on error location, the last
                                                // effect may not have been placed on dstThread.
-            if (srcThread->addEffect_ll(effect) == NO_ERROR) {
+            if (srcThread != nullptr && srcThread->addEffect_ll(effect) == NO_ERROR) {
                 ++restored;
                 if (dstChain == nullptr) {
                     dstChain = effect->getCallback()->chain().promote();
@@ -4488,7 +4645,7 @@
             if (effect->state() == IAfEffectModule::ACTIVE ||
                     effect->state() == IAfEffectModule::STOPPING) {
                 ++started;
-                effect->start();
+                effect->start_l();
             }
         }
         dstChain->mutex().unlock();
@@ -4498,15 +4655,19 @@
         if (errorString.empty()) {
             errorString = StringPrintf("%s: failed status %d", __func__, status);
         }
-        ALOGW("%s: %s unsuccessful move of session %d from srcThread %p to dstThread %p "
+        ALOGW("%s: %s unsuccessful move of session %d from %s %p to dstThread %p "
                 "(%zu effects removed from srcThread, %zu effects restored to srcThread, "
                 "%zu effects started)",
-                __func__, errorString.c_str(), sessionId, srcThread, dstThread,
+                __func__, errorString.c_str(), sessionId,
+                (srcThread != nullptr ? "srcThread" : "srcChain"),
+                (srcThread != nullptr ? (void*) srcThread : (void*) srcChain), dstThread,
                 removed.size(), restored, started);
     } else {
-        ALOGD("%s: successful move of session %d from srcThread %p to dstThread %p "
+        ALOGD("%s: successful move of session %d from %s %p to dstThread %p "
                 "(%zu effects moved, %zu effects started)",
-                __func__, sessionId, srcThread, dstThread, removed.size(), started);
+                __func__, sessionId, (srcThread != nullptr ? "srcThread" : "srcChain"),
+                (srcThread != nullptr ? (void*) srcThread : (void*) srcChain), dstThread,
+                removed.size(), started);
     }
     return status;
 }
@@ -4591,7 +4752,7 @@
         // removeEffect_l() has stopped the effect if it was active so it must be restarted
         if (effect->state() == IAfEffectModule::ACTIVE ||
             effect->state() == IAfEffectModule::STOPPING) {
-            effect->start();
+            effect->start_l();
         }
     }
 
@@ -4677,7 +4838,7 @@
     ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
     if (index >= 0) {
         sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
-        if (chain->removeEffect_l(effect, true) == 0) {
+        if (chain->removeEffect(effect, true) == 0) {
             ALOGV("updateOrphanEffectChains removing effect chain at index %zd", index);
             mOrphanEffectChains.removeItemsAt(index);
         }
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 0f75d6e..4e46bea 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -373,7 +373,8 @@
             EXCLUDES_AudioFlinger_Mutex;
 
     status_t moveEffectChain_ll(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread) final
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+            IAfEffectChain* srcChain = nullptr) final
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex);
 
     // This is a helper that is called during incoming binder calls.
@@ -702,6 +703,16 @@
 
     sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
 
+    sp<IAfEffectHandle> createOrphanEffect_l(const sp<Client>& client,
+                                          const sp<media::IEffectClient>& effectClient,
+                                          int32_t priority,
+                                          audio_session_t sessionId,
+                                          effect_descriptor_t *desc,
+                                          int *enabled,
+                                          status_t *status /*non-NULL*/,
+                                          bool pinned,
+                                          bool notifyFramesProcessed) REQUIRES(mutex());
+
     // for use from destructor
     status_t closeOutput_nonvirtual(audio_io_handle_t output) EXCLUDES_AudioFlinger_Mutex;
     status_t closeInput_nonvirtual(audio_io_handle_t input) EXCLUDES_AudioFlinger_Mutex;
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index 201d147..feae97e 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -143,7 +143,7 @@
         if (lStatus == NO_ERROR) {
             lStatus = effect->addHandle(handle.get());
             if (lStatus == NO_ERROR) {
-                lStatus = effect->init(patches);
+                lStatus = effect->init_l(patches);
                 if (lStatus == NAME_NOT_FOUND) {
                     lStatus = NO_ERROR;
                 }
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 7045c8b..287d838 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -139,7 +139,7 @@
     // check if effects should be suspended or restored when a given effect is enable or disabled
     void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                           bool enabled __unused, bool threadLocked __unused) final {}
-    void resetVolume() final {}
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex) {}
     product_strategy_t strategy() const final { return static_cast<product_strategy_t>(0); }
     int32_t activeTrackCnt() const final { return 0; }
     void onEffectEnable(const sp<IAfEffectBase>& effect __unused) final {}
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index ae55329..9406bcd 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -562,24 +562,21 @@
 #undef LOG_TAG
 #define LOG_TAG "EffectModule"
 
-EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback,
-                                         effect_descriptor_t *desc,
-                                         int id,
-                                         audio_session_t sessionId,
-                                         bool pinned,
-                                         audio_port_handle_t deviceId)
+EffectModule::EffectModule(const sp<EffectCallbackInterface>& callback, effect_descriptor_t* desc,
+                           int id, audio_session_t sessionId, bool pinned,
+                           audio_port_handle_t deviceId)
     : EffectBase(callback, desc, id, sessionId, pinned),
       // clear mConfig to ensure consistent initial value of buffer framecount
       // in case buffers are associated by setInBuffer() or setOutBuffer()
-      // prior to configure().
+      // prior to configure_l().
       mConfig{{}, {}},
       mStatus(NO_INIT),
-      mMaxDisableWaitCnt(1), // set by configure(), should be >= 1
+      mMaxDisableWaitCnt(1), // set by configure_l(), should be >= 1
       mDisableWaitCnt(0),    // set by process() and updateState()
       mOffloaded(false),
-      mIsOutput(false)
-      , mSupportsFloat(false)
-{
+      mIsOutput(false),
+      mSupportsFloat(false),
+      mEffectInterfaceDebug(desc->name) {
     ALOGV("Constructor %p pinned %d", this, pinned);
     int lStatus;
 
@@ -587,21 +584,24 @@
     mStatus = callback->createEffectHal(
             &desc->uuid, sessionId, deviceId, &mEffectInterface);
     if (mStatus != NO_ERROR) {
+        ALOGE("%s createEffectHal failed: %d", __func__, mStatus);
         return;
     }
-    lStatus = init();
+    lStatus = init_l();
     if (lStatus < 0) {
         mStatus = lStatus;
         goto Error;
     }
 
-    setOffloaded(callback->isOffload(), callback->io());
-    ALOGV("Constructor success name %s, Interface %p", mDescriptor.name, mEffectInterface.get());
+    setOffloaded_l(callback->isOffload(), callback->io());
+    ALOGV("%s Constructor success name %s, Interface %p", __func__, mDescriptor.name,
+          mEffectInterface.get());
 
     return;
 Error:
     mEffectInterface.clear();
-    ALOGV("Constructor Error %d", mStatus);
+    mEffectInterfaceDebug += " init failed:" + std::to_string(lStatus);
+    ALOGE("%s Constructor Error %d", __func__, mStatus);
 }
 
 EffectModule::~EffectModule()
@@ -612,12 +612,12 @@
         AudioEffect::guidToString(&mDescriptor.uuid, uuidStr, sizeof(uuidStr));
         ALOGW("EffectModule %p destructor called with unreleased interface, effect %s",
                 this, uuidStr);
-        release_l();
+        release_l("~EffectModule");
     }
 
 }
 
-bool EffectModule::updateState() {
+bool EffectModule::updateState_l() {
     audio_utils::lock_guard _l(mutex());
 
     bool started = false;
@@ -633,7 +633,7 @@
                    0,
                    mConfig.inputCfg.buffer.frameCount*sizeof(int32_t));
         }
-        if (start_l() == NO_ERROR) {
+        if (start_ll() == NO_ERROR) {
             mState = ACTIVE;
             started = true;
         } else {
@@ -642,8 +642,8 @@
         break;
     case STOPPING:
         // volume control for offload and direct threads must take effect immediately.
-        if (stop_l() == NO_ERROR
-            && !(isVolumeControl() && isOffloadedOrDirect())) {
+        if (stop_ll() == NO_ERROR
+            && !(isVolumeControl() && isOffloadedOrDirect_l())) {
             mDisableWaitCnt = mMaxDisableWaitCnt;
         } else {
             mDisableWaitCnt = 1; // will cause immediate transition to IDLE
@@ -837,9 +837,9 @@
     mEffectInterface->command(EFFECT_CMD_RESET, 0, NULL, &replySize, &reply);
 }
 
-status_t EffectModule::configure()
+status_t EffectModule::configure_l()
 {
-    ALOGVV("configure() started");
+    ALOGVV("%s started", __func__);
     status_t status;
     uint32_t size;
     audio_channel_mask_t channelMask;
@@ -880,7 +880,7 @@
     mConfig.outputCfg.format = AUDIO_FORMAT_PCM_FLOAT;
 
     // Don't use sample rate for thread if effect isn't offloadable.
-    if (callback->isOffloadOrDirect() && !isOffloaded()) {
+    if (callback->isOffloadOrDirect() && !isOffloaded_l()) {
         mConfig.inputCfg.samplingRate = DEFAULT_OUTPUT_SAMPLE_RATE;
         ALOGV("Overriding effect input as 48kHz");
     } else {
@@ -910,9 +910,9 @@
     mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
     mIsOutput = callback->isOutput();
 
-    ALOGV("configure() %p chain %p buffer %p framecount %zu",
-          this, callback->chain().promote().get(),
-          mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
+    ALOGV("%s %p chain %p buffer %p framecount %zu", __func__, this,
+          callback->chain().promote().get(), mConfig.inputCfg.buffer.raw,
+          mConfig.inputCfg.buffer.frameCount);
 
     status_t cmdStatus;
     size = sizeof(int);
@@ -1013,11 +1013,11 @@
 exit:
     // TODO: consider clearing mConfig on error.
     mStatus = status;
-    ALOGVV("configure ended");
+    ALOGVV("%s ended", __func__);
     return status;
 }
 
-status_t EffectModule::init()
+status_t EffectModule::init_l()
 {
     audio_utils::lock_guard _l(mutex());
     if (mEffectInterface == 0) {
@@ -1049,21 +1049,21 @@
     }
 }
 
-// start() must be called with PlaybackThread::mutex() or EffectChain::mutex() held
-status_t EffectModule::start()
+// start_l() must be called with EffectChain::mutex() held
+status_t EffectModule::start_l()
 {
     status_t status;
     {
         audio_utils::lock_guard _l(mutex());
-        status = start_l();
+        status = start_ll();
     }
     if (status == NO_ERROR) {
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
     }
     return status;
 }
 
-status_t EffectModule::start_l()
+status_t EffectModule::start_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1087,13 +1087,13 @@
     return status;
 }
 
-status_t EffectModule::stop()
+status_t EffectModule::stop_l()
 {
     audio_utils::lock_guard _l(mutex());
-    return stop_l();
+    return stop_ll();
 }
 
-status_t EffectModule::stop_l()
+status_t EffectModule::stop_ll()
 {
     if (mEffectInterface == 0) {
         return NO_INIT;
@@ -1104,11 +1104,11 @@
     status_t cmdStatus = NO_ERROR;
     uint32_t size = sizeof(status_t);
 
-    if (isVolumeControl() && isOffloadedOrDirect()) {
+    if (isVolumeControl() && isOffloadedOrDirect_l()) {
         // We have the EffectChain and EffectModule lock, permit a reentrant call to setVolume:
         // resetVolume_l --> setVolume_l --> EffectModule::setVolume
         mSetVolumeReentrantTid = gettid();
-        getCallback()->resetVolume();
+        getCallback()->resetVolume_l();
         mSetVolumeReentrantTid = INVALID_PID;
     }
 
@@ -1127,13 +1127,14 @@
 }
 
 // must be called with EffectChain::mutex() held
-void EffectModule::release_l()
+void EffectModule::release_l(const std::string& from)
 {
     if (mEffectInterface != 0) {
         removeEffectFromHal_l();
         // release effect engine
         mEffectInterface->close();
         mEffectInterface.clear();
+        mEffectInterfaceDebug += " released by: " + from;
     }
 }
 
@@ -1163,7 +1164,7 @@
                      std::vector<uint8_t>* reply)
 {
     audio_utils::lock_guard _l(mutex());
-    ALOGVV("command(), cmdCode: %d, mEffectInterface: %p", cmdCode, mEffectInterface.get());
+    ALOGVV("%s, cmdCode: %d, mEffectInterface: %p", __func__, cmdCode, mEffectInterface.get());
 
     if (mState == DESTROYED || mEffectInterface == 0) {
         return NO_INIT;
@@ -1259,20 +1260,20 @@
     }
 }
 
-bool EffectModule::isOffloadedOrDirect() const
+bool EffectModule::isOffloadedOrDirect_l() const
 {
     return getCallback()->isOffloadOrDirect();
 }
 
-bool EffectModule::isVolumeControlEnabled() const
+bool EffectModule::isVolumeControlEnabled_l() const
 {
-    return (isVolumeControl() && (isOffloadedOrDirect() ? isEnabled() : isProcessEnabled()));
+    return (isVolumeControl() && (isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()));
 }
 
 void EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setInBuffer %p",(&buffer));
 
-    // mConfig.inputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.inputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
@@ -1318,7 +1319,7 @@
 void EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
     ALOGVV("setOutBuffer %p",(&buffer));
 
-    // mConfig.outputCfg.buffer.frameCount may be zero if configure() is not called yet.
+    // mConfig.outputCfg.buffer.frameCount may be zero if configure_l() is not called yet.
     if (buffer != 0) {
         mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
         buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
@@ -1357,8 +1358,7 @@
     }
 }
 
-status_t EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
-{
+status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller, bool force) {
     AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
@@ -1366,7 +1366,7 @@
     status_t status = NO_ERROR;
     // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
     // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set)
-    if (isProcessEnabled() &&
+    if ((isProcessEnabled() || force) &&
             ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
@@ -1377,6 +1377,10 @@
 
 status_t EffectModule::setVolumeInternal(
         uint32_t *left, uint32_t *right, bool controller) {
+    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1]) {
+        return NO_ERROR;
+    }
+    LOG_ALWAYS_FATAL_IF(mEffectInterface == nullptr, "%s", mEffectInterfaceDebug.c_str());
     uint32_t volume[2] = {*left, *right};
     uint32_t *pVolume = controller ? volume : nullptr;
     uint32_t size = sizeof(volume);
@@ -1386,6 +1390,7 @@
                                                 &size,
                                                 pVolume);
     if (controller && status == NO_ERROR && size == sizeof(volume)) {
+        mVolume = {*left, *right}; // Cache the value that has been set
         *left = volume[0];
         *right = volume[1];
     }
@@ -1481,7 +1486,7 @@
     return status;
 }
 
-status_t EffectModule::setOffloaded(bool offloaded, audio_io_handle_t io)
+status_t EffectModule::setOffloaded_l(bool offloaded, audio_io_handle_t io)
 {
     audio_utils::lock_guard _l(mutex());
     if (mStatus != NO_ERROR) {
@@ -1510,11 +1515,11 @@
         }
         mOffloaded = false;
     }
-    ALOGV("setOffloaded() offloaded %d io %d status %d", offloaded, io, status);
+    ALOGV("%s offloaded %d io %d status %d", __func__, offloaded, io, status);
     return status;
 }
 
-bool EffectModule::isOffloaded() const
+bool EffectModule::isOffloaded_l() const
 {
     audio_utils::lock_guard _l(mutex());
     return mOffloaded;
@@ -1529,8 +1534,16 @@
     return IAfEffectModule::isHapticGenerator(&mDescriptor.type);
 }
 
-status_t EffectModule::setHapticIntensity(int id, os::HapticScale intensity)
-{
+/*static*/
+bool IAfEffectModule::isSpatializer(const effect_uuid_t *type) {
+    return memcmp(type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+}
+
+bool EffectModule::isSpatializer() const {
+    return IAfEffectModule::isSpatializer(&mDescriptor.type);
+}
+
+status_t EffectModule::setHapticScale_l(int id, os::HapticScale hapticScale) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1539,13 +1552,16 @@
         return INVALID_OPERATION;
     }
 
-    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t));
+    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
     effect_param_t *param = (effect_param_t*) request.data();
     param->psize = sizeof(int32_t);
-    param->vsize = sizeof(int32_t) * 2;
+    param->vsize = sizeof(int32_t) * 2 + sizeof(float);
     *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
-    *((int32_t*)param->data + 1) = id;
-    *((int32_t*)param->data + 2) = static_cast<int32_t>(intensity);
+    int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
+    hapticScalePtr[0] = id;
+    hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
+    float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
+    *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1555,8 +1571,7 @@
     return status;
 }
 
-status_t EffectModule::setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo)
-{
+status_t EffectModule::setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) {
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
@@ -1585,8 +1600,8 @@
     return status;
 }
 
-status_t EffectModule::getConfigs(
-        audio_config_base_t* inputCfg, audio_config_base_t* outputCfg, bool* isOutput) const {
+status_t EffectModule::getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                    bool* isOutput) const {
     audio_utils::lock_guard _l(mutex());
     if (mConfig.inputCfg.mask == 0 || mConfig.outputCfg.mask == 0) {
         return NO_INIT;
@@ -1601,6 +1616,35 @@
     return NO_ERROR;
 }
 
+status_t EffectModule::sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) {
+    if (mStatus != NO_ERROR) {
+        return mStatus;
+    }
+    // TODO b/307368176: send all metadata to effects if requested by the implementation.
+    // For now only send channel mask to Spatializer.
+    if (!isSpatializer()) {
+        return INVALID_OPERATION;
+    }
+
+    std::vector<uint8_t> request(
+            sizeof(effect_param_t) + sizeof(int32_t) + metadata.size() * sizeof(uint32_t));
+    effect_param_t *param = (effect_param_t*) request.data();
+    param->psize = sizeof(int32_t);
+    param->vsize = metadata.size() * sizeof(uint32_t);
+    *(int32_t*)param->data = SPATIALIZER_PARAM_INPUT_CHANNEL_MASK;
+    uint32_t* channelMasks = reinterpret_cast<uint32_t*>(param->data + sizeof(int32_t));
+    for (auto m : metadata) {
+        *channelMasks++ = m.channel_mask;
+    }
+    std::vector<uint8_t> response;
+    status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
+    if (status == NO_ERROR) {
+        LOG_ALWAYS_FATAL_IF(response.size() != sizeof(status_t));
+        status = *reinterpret_cast<const status_t*>(response.data());
+    }
+    return status;
+}
+
 static std::string dumpInOutBuffer(bool isInput, const sp<EffectBufferHalInterface> &buffer) {
     std::stringstream ss;
 
@@ -1699,6 +1743,7 @@
 {
     ALOGV("constructor %p client %p", this, client.get());
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 
     if (client == 0) {
         return;
@@ -1912,7 +1957,7 @@
     audio_config_base_t inputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     audio_config_base_t outputCfg = AUDIO_CONFIG_BASE_INITIALIZER;
     bool isOutput;
-    status_t status = effectModule->getConfigs(&inputCfg, &outputCfg, &isOutput);
+    status_t status = effectModule->getConfigs_l(&inputCfg, &outputCfg, &isOutput);
     if (status == NO_ERROR) {
         constexpr bool isInput = false; // effects always use 'OUT' channel masks.
         _config->inputCfg = VALUE_OR_RETURN_STATUS_AS_OUT(
@@ -2119,27 +2164,31 @@
 /* static */
 sp<IAfEffectChain> IAfEffectChain::create(
         const sp<IAfThreadBase>& thread,
-        audio_session_t sessionId)
+        audio_session_t sessionId,
+        const sp<IAfThreadCallback>& afThreadCallback)
 {
-    return sp<EffectChain>::make(thread, sessionId);
+    return sp<EffectChain>::make(thread, sessionId, afThreadCallback);
 }
 
-EffectChain::EffectChain(const sp<IAfThreadBase>& thread,
-                                       audio_session_t sessionId)
+EffectChain::EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId,
+                         const sp<IAfThreadCallback>& afThreadCallback)
     : mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
       mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
       mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
-      mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
+      mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread, afThreadCallback))
 {
-    mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
-    mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
-                                    thread->frameCount();
+    if (thread != nullptr) {
+        mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+        mMaxTailBuffers =
+            ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+                thread->frameCount();
+    }
 }
 
-// getEffectFromDesc_l() must be called with IAfThreadBase::mutex() held
-sp<IAfEffectModule> EffectChain::getEffectFromDesc_l(
+sp<IAfEffectModule> EffectChain::getEffectFromDesc(
         effect_descriptor_t *descriptor) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2153,6 +2202,7 @@
 // getEffectFromId_l() must be called with IAfThreadBase::mutex() held
 sp<IAfEffectModule> EffectChain::getEffectFromId_l(int id) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2168,6 +2218,7 @@
 sp<IAfEffectModule> EffectChain::getEffectFromType_l(
         const effect_uuid_t *type) const
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
 
     for (size_t i = 0; i < size; i++) {
@@ -2178,7 +2229,7 @@
     return 0;
 }
 
-std::vector<int> EffectChain::getEffectIds() const
+std::vector<int> EffectChain::getEffectIds_l() const
 {
     std::vector<int> ids;
     audio_utils::lock_guard _l(mutex());
@@ -2208,8 +2259,7 @@
 }
 
 // Must be called with EffectChain::mutex() locked
-void EffectChain::process_l()
-{
+void EffectChain::process_l() {
     // never process effects when:
     // - on an OFFLOAD thread
     // - no more tracks are on the session and the effect tail has been rendered
@@ -2252,15 +2302,14 @@
     }
     bool doResetVolume = false;
     for (size_t i = 0; i < size; i++) {
-        doResetVolume = mEffects[i]->updateState() || doResetVolume;
+        doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
     }
     if (doResetVolume) {
         resetVolume_l();
     }
 }
 
-// createEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::createEffect_l(sp<IAfEffectModule>& effect,
+status_t EffectChain::createEffect(sp<IAfEffectModule>& effect,
                                                    effect_descriptor_t *desc,
                                                    int id,
                                                    audio_session_t sessionId,
@@ -2270,7 +2319,7 @@
     effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned, AUDIO_PORT_HANDLE_NONE);
     status_t lStatus = effect->status();
     if (lStatus == NO_ERROR) {
-        lStatus = addEffect_ll(effect);
+        lStatus = addEffect_l(effect);
     }
     if (lStatus != NO_ERROR) {
         effect.clear();
@@ -2278,22 +2327,22 @@
     return lStatus;
 }
 
-// addEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
+status_t EffectChain::addEffect(const sp<IAfEffectModule>& effect)
 {
     audio_utils::lock_guard _l(mutex());
-    return addEffect_ll(effect);
+    return addEffect_l(effect);
 }
-// addEffect_l() must be called with IAfThreadBase::mutex() and EffectChain::mutex() held
-status_t EffectChain::addEffect_ll(const sp<IAfEffectModule>& effect)
+// addEffect_l() must be called with EffectChain::mutex() held
+status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
 {
     effect->setCallback(mEffectCallback);
 
     effect_descriptor_t desc = effect->desc();
+    ssize_t idx_insert = 0;
     if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
         // Auxiliary effects are inserted at the beginning of mEffects vector as
         // they are processed first and accumulated in chain input buffer
-        mEffects.insertAt(effect, 0);
+        mEffects.insertAt(effect, idx_insert);
 
         // the input buffer for auxiliary effect contains mono samples in
         // 32 bit format. This is to avoid saturation in AudoMixer
@@ -2306,14 +2355,14 @@
                 numSamples * sizeof(float), &halBuffer);
         if (result != OK) return result;
 
-        effect->configure();
+        effect->configure_l();
 
         effect->setInBuffer(halBuffer);
         // auxiliary effects output samples to chain input buffer for further processing
         // by insert effects
         effect->setOutBuffer(mInBuffer);
     } else {
-        ssize_t idx_insert = getInsertIndex(desc);
+        idx_insert = getInsertIndex_l(desc);
         if (idx_insert < 0) {
             return INVALID_OPERATION;
         }
@@ -2321,7 +2370,7 @@
         size_t previousSize = mEffects.size();
         mEffects.insertAt(effect, idx_insert);
 
-        effect->configure();
+        effect->configure_l();
 
         // - By default:
         //   All effects read samples from chain input buffer.
@@ -2336,9 +2385,9 @@
             effect->setOutBuffer(mOutBuffer);
             if (idx_insert == 0) {
                 if (previousSize != 0) {
-                    mEffects[1]->configure();
+                    mEffects[1]->configure_l();
                     mEffects[1]->setInBuffer(mOutBuffer);
-                    mEffects[1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setInBuffer(mInBuffer);
             } else {
@@ -2348,9 +2397,9 @@
             effect->setInBuffer(mInBuffer);
             if (idx_insert == static_cast<ssize_t>(previousSize)) {
                 if (idx_insert != 0) {
-                    mEffects[idx_insert-1]->configure();
+                    mEffects[idx_insert-1]->configure_l();
                     mEffects[idx_insert-1]->setOutBuffer(mInBuffer);
-                    mEffects[idx_insert - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[idx_insert - 1]->updateAccessMode_l();  // reconfig if needed.
                 }
                 effect->setOutBuffer(mOutBuffer);
             } else {
@@ -2360,21 +2409,33 @@
         ALOGV("%s effect %p, added in chain %p at rank %zu",
                 __func__, effect.get(), this, idx_insert);
     }
-    effect->configure();
+    effect->configure_l();
+
+    if (effect->isVolumeControl()) {
+        const auto volumeControlIndex = findVolumeControl_l(0, mEffects.size());
+        if (!volumeControlIndex.has_value() || (ssize_t)volumeControlIndex.value() < idx_insert) {
+            // If this effect will become the volume control effect once it is enabled, force its
+            // initial volume to 0 for safer ramping. The actual volume will be set from
+            // setVolume_l.
+            uint32_t left = 0;
+            uint32_t right = 0;
+            effect->setVolume(&left, &right, true /*controller*/, true /*force*/);
+        }
+    }
 
     return NO_ERROR;
 }
 
 std::optional<size_t> EffectChain::findVolumeControl_l(size_t from, size_t to) const {
     for (size_t i = std::min(to, mEffects.size()); i > from; i--) {
-        if (mEffects[i - 1]->isVolumeControlEnabled()) {
+        if (mEffects[i - 1]->isVolumeControlEnabled_l()) {
             return i - 1;
         }
     }
     return std::nullopt;
 }
 
-ssize_t EffectChain::getInsertIndex(const effect_descriptor_t& desc) {
+ssize_t EffectChain::getInsertIndex_l(const effect_descriptor_t& desc) {
     // Insert effects are inserted at the end of mEffects vector as they are processed
     //  after track and auxiliary effects.
     // Insert effect order as a function of indicated preference:
@@ -2389,7 +2450,7 @@
     // already present
     // Spatializer or Downmixer effects are inserted in first position because
     // they adapt the channel count for all other effects in the chain
-    if ((memcmp(&desc.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0)
+    if (IAfEffectModule::isSpatializer(&desc.type)
             || (memcmp(&desc.type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0)) {
         return 0;
     }
@@ -2447,14 +2508,14 @@
     return idx_insert;
 }
 
-// removeEffect_l() must be called with IAfThreadBase::mutex() held
-size_t EffectChain::removeEffect_l(const sp<IAfEffectModule>& effect,
+size_t EffectChain::removeEffect(const sp<IAfEffectModule>& effect,
                                                  bool release)
 {
     audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     uint32_t type = effect->desc().flags & EFFECT_FLAG_TYPE_MASK;
 
+    const bool hasThreadAttached = mEffectCallback->hasThreadAttached();
     for (size_t i = 0; i < size; i++) {
         if (effect == mEffects[i]) {
             // calling stop here will remove pre-processing effect from the audio HAL.
@@ -2462,17 +2523,17 @@
             // the middle of a read from audio HAL
             if (mEffects[i]->state() == EffectModule::ACTIVE ||
                     mEffects[i]->state() == EffectModule::STOPPING) {
-                mEffects[i]->stop();
+                mEffects[i]->stop_l();
             }
             if (release) {
-                mEffects[i]->release_l();
+                mEffects[i]->release_l("EffectChain::removeEffect");
             }
-
-            if (type != EFFECT_FLAG_TYPE_AUXILIARY) {
+            // Skip when no thread is attached (a frame count of 0 could trigger SIGFPE).
+            if (hasThreadAttached && type != EFFECT_FLAG_TYPE_AUXILIARY) {
                 if (i == size - 1 && i != 0) {
-                    mEffects[i - 1]->configure();
+                    mEffects[i - 1]->configure_l();
                     mEffects[i - 1]->setOutBuffer(mOutBuffer);
-                    mEffects[i - 1]->updateAccessMode();      // reconfig if neeeded.
+                    mEffects[i - 1]->updateAccessMode_l();      // reconfig if needed.
                 }
             }
             mEffects.removeAt(i);
@@ -2480,10 +2541,10 @@
             // make sure the input buffer configuration for the new first effect in the chain
             // is updated if needed (can switch from HAL channel mask to mixer channel mask)
             if (type != EFFECT_FLAG_TYPE_AUXILIARY // TODO(b/284522658) breaks for aux FX, why?
-                    && i == 0 && size > 1) {
-                mEffects[0]->configure();
+                    && hasThreadAttached && i == 0 && size > 1) {
+                mEffects[0]->configure_l();
                 mEffects[0]->setInBuffer(mInBuffer);
-                mEffects[0]->updateAccessMode();      // reconfig if neeeded.
+                mEffects[0]->updateAccessMode_l();      // reconfig if needed.
             }
 
             ALOGV("removeEffect_l() effect %p, removed from chain %p at rank %zu", effect.get(),
@@ -2498,6 +2559,7 @@
 // setDevices_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setDevices_l(const AudioDeviceTypeAddrVector &devices)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setDevices(devices);
@@ -2507,6 +2569,7 @@
 // setInputDevice_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setInputDevice_l(const AudioDeviceTypeAddr &device)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setInputDevice(device);
@@ -2516,6 +2579,7 @@
 // setMode_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setMode_l(audio_mode_t mode)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setMode(mode);
@@ -2525,6 +2589,7 @@
 // setAudioSource_l() must be called with IAfThreadBase::mutex() held
 void EffectChain::setAudioSource_l(audio_source_t source)
 {
+    audio_utils::lock_guard _l(mutex());
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         mEffects[i]->setAudioSource(source);
@@ -2533,14 +2598,19 @@
 
 bool EffectChain::hasVolumeControlEnabled_l() const {
     for (const auto &effect : mEffects) {
-        if (effect->isVolumeControlEnabled()) return true;
+        if (effect->isVolumeControlEnabled_l()) return true;
     }
     return false;
 }
 
-// setVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
-bool EffectChain::setVolume_l(uint32_t *left, uint32_t *right, bool force)
-{
+// setVolume() must be called without EffectChain::mutex() held
+bool EffectChain::setVolume(uint32_t* left, uint32_t* right, bool force) {
+    audio_utils::lock_guard _l(mutex());
+    return setVolume_l(left, right, force);
+}
+
+// setVolume_l() must be called with EffectChain::mutex() held
+bool EffectChain::setVolume_l(uint32_t* left, uint32_t* right, bool force) {
     uint32_t newLeft = *left;
     uint32_t newRight = *right;
     const size_t size = mEffects.size();
@@ -2560,22 +2630,18 @@
         }
         return volumeControlIndex.has_value();
     }
+    mVolumeControlEffect = volumeControlEffect;
 
-    if (volumeControlEffect != cachedVolumeControlEffect) {
-        // The volume control effect is a new one. Set the old one as full volume. Set the new onw
-        // as zero for safe ramping.
-        if (cachedVolumeControlEffect != nullptr) {
+    for (int i = 0; i < ctrlIdx; ++i) {
+        // For all volume control effects before the effect that controls volume, set the volume
+        // to maximum to avoid double attenuation.
+        if (mEffects[i]->isVolumeControl()) {
             uint32_t leftMax = 1 << 24;
             uint32_t rightMax = 1 << 24;
-            cachedVolumeControlEffect->setVolume(&leftMax, &rightMax, true /*controller*/);
+            mEffects[i]->setVolume(&leftMax, &rightMax, true /*controller*/, true /*force*/);
         }
-        if (volumeControlEffect != nullptr) {
-            uint32_t leftZero = 0;
-            uint32_t rightZero = 0;
-            volumeControlEffect->setVolume(&leftZero, &rightZero, true /*controller*/);
-        }
-        mVolumeControlEffect = volumeControlEffect;
     }
+
     mLeftVolume = newLeft;
     mRightVolume = newRight;
 
@@ -2615,7 +2681,7 @@
     return volumeControlIndex.has_value();
 }
 
-// resetVolume_l() must be called with IAfThreadBase::mutex() or EffectChain::mutex() held
+// resetVolume_l() must be called with EffectChain::mutex() held
 void EffectChain::resetVolume_l()
 {
     if ((mLeftVolume != UINT_MAX) && (mRightVolume != UINT_MAX)) {
@@ -2625,8 +2691,12 @@
     }
 }
 
-// containsHapticGeneratingEffect_l must be called with
-// IAfThreadBase::mutex() or EffectChain::mutex() held
+bool EffectChain::containsHapticGeneratingEffect()
+{
+    audio_utils::lock_guard _l(mutex());
+    return containsHapticGeneratingEffect_l();
+}
+// containsHapticGeneratingEffect_l must be called with EffectChain::mutex() held
 bool EffectChain::containsHapticGeneratingEffect_l()
 {
     for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2637,15 +2707,15 @@
     return false;
 }
 
-void EffectChain::setHapticIntensity_l(int id, os::HapticScale intensity)
+void EffectChain::setHapticScale_l(int id, os::HapticScale hapticScale)
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); ++i) {
-        mEffects[i]->setHapticIntensity(id, intensity);
+        mEffects[i]->setHapticScale_l(id, hapticScale);
     }
 }
 
-void EffectChain::syncHalEffectsState()
+void EffectChain::syncHalEffectsState_l()
 {
     audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
@@ -2714,7 +2784,7 @@
         }
 
         if (desc->mRefCount++ == 0) {
-            sp<IAfEffectModule> effect = getEffectIfEnabled(type);
+            sp<IAfEffectModule> effect = getEffectIfEnabled_l(type);
             if (effect != 0) {
                 desc->mEffect = effect;
                 effect->setSuspended(true);
@@ -2808,8 +2878,7 @@
 #endif //OPENSL_ES_H_
 
 /* static */
-bool EffectChain::isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type)
-{
+bool EffectChain::isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type) {
     // Only NS and AEC are suspended when BtNRec is off
     if ((memcmp(type, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) ||
         (memcmp(type, FX_IID_NS, sizeof(effect_uuid_t)) == 0)) {
@@ -2835,6 +2904,7 @@
         Vector< sp<IAfEffectModule> > &effects)
 {
     effects.clear();
+    audio_utils::lock_guard _l(mutex());
     for (size_t i = 0; i < mEffects.size(); i++) {
         if (isEffectEligibleForSuspend(mEffects[i]->desc())) {
             effects.add(mEffects[i]);
@@ -2842,15 +2912,13 @@
     }
 }
 
-sp<IAfEffectModule> EffectChain::getEffectIfEnabled(const effect_uuid_t *type)
+sp<IAfEffectModule> EffectChain::getEffectIfEnabled_l(const effect_uuid_t *type)
 {
     sp<IAfEffectModule> effect = getEffectFromType_l(type);
     return effect != 0 && effect->isEnabled() ? effect : 0;
 }
 
-void EffectChain::checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect,
-                                                            bool enabled)
-{
+void EffectChain::checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) {
     ssize_t index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
     if (enabled) {
         if (index < 0) {
@@ -2865,12 +2933,11 @@
             setEffectSuspended_l(&effect->desc().type, enabled);
             index = mSuspendedEffects.indexOfKey(effect->desc().type.timeLow);
             if (index < 0) {
-                ALOGW("checkSuspendOnEffectEnabled() Fx should be suspended here!");
+                ALOGW("%s Fx should be suspended here!", __func__);
                 return;
             }
         }
-        ALOGV("checkSuspendOnEffectEnabled() enable suspending fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s enable suspending fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         // if effect is requested to suspended but was not yet enabled, suspend it now.
         if (desc->mEffect == 0) {
@@ -2882,8 +2949,7 @@
         if (index < 0) {
             return;
         }
-        ALOGV("checkSuspendOnEffectEnabled() disable restoring fx %08x",
-            effect->desc().type.timeLow);
+        ALOGV("%s disable restoring fx %08x", __func__, effect->desc().type.timeLow);
         sp<SuspendedEffectDesc> desc = mSuspendedEffects.valueAt(index);
         desc->mEffect.clear();
         effect->setSuspended(false);
@@ -2909,6 +2975,12 @@
 
 void EffectChain::setThread(const sp<IAfThreadBase>& thread)
 {
+    if (thread != nullptr) {
+        mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+        mMaxTailBuffers =
+            ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+                thread->frameCount();
+    }
     audio_utils::lock_guard _l(mutex());
     mEffectCallback->setThread(thread);
 }
@@ -2985,6 +3057,20 @@
     return true;
 }
 
+// sendMetadata_l() must be called with thread->mutex() held
+void EffectChain::sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata) {
+    audio_utils::lock_guard _l(mutex());
+    for (const auto& effect : mEffects) {
+        if (spatializedMetadata.has_value()
+                && IAfEffectModule::isSpatializer(&effect->desc().type)) {
+            effect->sendMetadata_ll(spatializedMetadata.value());
+        } else {
+            effect->sendMetadata_ll(allMetadata);
+        }
+    }
+}
+
 // EffectCallbackInterface implementation
 status_t EffectChain::EffectCallback::createEffectHal(
         const effect_uuid_t *pEffectUuid, int32_t sessionId, int32_t deviceId,
@@ -3084,7 +3170,7 @@
 uint32_t EffectChain::EffectCallback::sampleRate() const {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return 0;
+        return DEFAULT_OUTPUT_SAMPLE_RATE;
     }
     return t->sampleRate();
 }
@@ -3092,19 +3178,20 @@
 audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const
 NO_THREAD_SAFETY_ANALYSIS
 // calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+// calling function 'isFirstEffect_l' requires holding mutex 'EffectChain_Mutex' exclusively
 {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
 
     if (mThreadType == IAfThreadBase::SPATIALIZER) {
         if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
-            if (c->isFirstEffect(id)) {
+            if (c->isFirstEffect_l(id)) {
                 return t->mixerChannelMask();
             } else {
                 return t->channelMask();
@@ -3134,11 +3221,11 @@
 {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
-        return AUDIO_CHANNEL_NONE;
+        return AUDIO_CHANNEL_OUT_STEREO;
     }
 
     if (mThreadType == IAfThreadBase::SPATIALIZER) {
@@ -3172,7 +3259,8 @@
 size_t EffectChain::EffectCallback::frameCount() const {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
-        return 0;
+        // frameCount cannot be zero.
+        return 1;
     }
     return t->frameCount();
 }
@@ -3198,8 +3286,9 @@
     t->setVolumeForOutput_l(left, right);
 }
 
-void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(
-        const sp<IAfEffectBase>& effect, bool enabled, bool threadLocked) {
+void EffectChain::EffectCallback::checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
+                                                              bool enabled, bool threadLocked)
+        NO_THREAD_SAFETY_ANALYSIS {
     const sp<IAfThreadBase> t = thread().promote();
     if (t == nullptr) {
         return;
@@ -3211,7 +3300,7 @@
         return;
     }
     // in EffectChain context, an EffectBase is always from an EffectModule so static cast is safe
-    c->checkSuspendOnEffectEnabled(effect->asEffectModule(), enabled);
+    c->checkSuspendOnEffectEnabled_l(effect->asEffectModule(), enabled);
 }
 
 void EffectChain::EffectCallback::onEffectEnable(const sp<IAfEffectBase>& effect) {
@@ -3243,7 +3332,7 @@
     return true;
 }
 
-void EffectChain::EffectCallback::resetVolume() {
+void EffectChain::EffectCallback::resetVolume_l() {
     sp<IAfEffectChain> c = chain().promote();
     if (c == nullptr) {
         return;
@@ -3304,7 +3393,7 @@
     return status;
 }
 
-status_t DeviceEffectProxy::init(
+status_t DeviceEffectProxy::init_l(
         const std::map <audio_patch_handle_t, IAfPatchPanel::Patch>& patches) {
 //For all audio patches
 //If src or sink device match
@@ -3408,7 +3497,7 @@
             } else {
                 mHalEffect->setDevices({mDevice});
             }
-            mHalEffect->configure();
+            mHalEffect->configure_l();
         }
         *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
                                    mNotifyFramesProcessed);
@@ -3474,7 +3563,7 @@
 {
     audio_utils::lock_guard _l(proxyMutex());
     if (effect == mHalEffect) {
-        mHalEffect->release_l();
+        mHalEffect->release_l("DeviceEffectProxy::removeEffect");
         mHalEffect.clear();
         mDevicePort.id = AUDIO_PORT_HANDLE_NONE;
     }
@@ -3697,7 +3786,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->start();
+    effect->start_l();
 }
 
 void DeviceEffectProxy::ProxyCallback::onEffectDisable(
@@ -3706,7 +3795,7 @@
     if (effect == nullptr) {
         return;
     }
-    effect->stop();
+    effect->stop_l();
 }
 
 } // namespace android
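
The renames above follow one locking convention: a method without the "_l" suffix takes the object's own mutex and forwards to a same-named "_l" variant that assumes the lock is already held, so callers that already hold the mutex (for example from process_l()) can use the "_l" form directly without re-locking. Below is a minimal sketch of that convention only; it uses plain std::mutex rather than audio_utils::mutex, and the Chain/mLeft/mRight names are illustrative, not part of this patch.

    #include <cstdint>
    #include <mutex>

    class Chain {
    public:
        // Public entry point: takes the chain's own lock, then delegates to the _l variant.
        bool setVolume(uint32_t* left, uint32_t* right, bool force = false) {
            std::lock_guard<std::mutex> _l(mMutex);
            return setVolume_l(left, right, force);
        }

    private:
        // Internal variant: the caller must already hold mMutex (e.g. a process_l()-style path).
        bool setVolume_l(uint32_t* left, uint32_t* right, bool force) {
            if (!force && *left == mLeft && *right == mRight) return false;
            mLeft = *left;
            mRight = *right;
            return true;
        }

        std::mutex mMutex;
        uint32_t mLeft = 0;
        uint32_t mRight = 0;
    };

The patch applies the same split to addEffect()/addEffect_l(), removeEffect(), containsHapticGeneratingEffect(), and setVolume(), replacing the earlier reliance on the caller holding IAfThreadBase::mutex().
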
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 9208c88..64a4e62 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -25,6 +25,8 @@
 #include <private/media/AudioEffectShared.h>
 
 #include <map>  // avoid transitive dependency
+#include <optional>
+#include <vector>
 
 namespace android {
 
@@ -78,11 +80,11 @@
                         { return (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK)
                             == EFFECT_FLAG_VOLUME_MONITOR; }
 
-    status_t setEnabled(bool enabled, bool fromHandle) override;
-    status_t setEnabled_l(bool enabled) final;
+    status_t setEnabled(bool enabled, bool fromHandle) override EXCLUDES_EffectBase_Mutex;
+    status_t setEnabled_l(bool enabled) final REQUIRES(audio_utils::EffectBase_Mutex);
     bool isEnabled() const final;
-    void setSuspended(bool suspended) final;
-    bool suspended() const final;
+    void setSuspended(bool suspended) final EXCLUDES_EffectBase_Mutex;
+    bool suspended() const final EXCLUDES_EffectBase_Mutex;
 
     status_t command(int32_t __unused,
                              const std::vector<uint8_t>& __unused,
@@ -99,36 +101,40 @@
         return mCallback.load();
     }
 
-    status_t addHandle(IAfEffectHandle *handle) final;
-    ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) final;
-    ssize_t removeHandle(IAfEffectHandle *handle) final;
-    ssize_t removeHandle_l(IAfEffectHandle *handle) final;
-    IAfEffectHandle* controlHandle_l() final;
-    bool purgeHandles() final;
+    status_t addHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t disconnectHandle(IAfEffectHandle* handle,
+                             bool unpinIfLast) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle(IAfEffectHandle* handle) final EXCLUDES_EffectBase_Mutex;
+    ssize_t removeHandle_l(IAfEffectHandle* handle) final REQUIRES(audio_utils::EffectBase_Mutex);
+    IAfEffectHandle* controlHandle_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    bool purgeHandles() final EXCLUDES_EffectBase_Mutex;
 
-    void             checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
+    void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) final;
 
-    bool             isPinned() const final { return mPinned; }
-    void             unPin() final { mPinned = false; }
+    bool isPinned() const final { return mPinned; }
+    void unPin() final { mPinned = false; }
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) {
+        return mMutex;
+    }
 
-    status_t         updatePolicyState() final;
+    status_t updatePolicyState() final EXCLUDES_EffectBase_Mutex;
 
     sp<IAfEffectModule> asEffectModule() override { return nullptr; }
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() override { return nullptr; }
 
-    void             dump(int fd, const Vector<String16>& args) const override;
+    void dump(int fd, const Vector<String16>& args) const override;
 
 protected:
-    bool             isInternal_l() const {
-                         for (auto handle : mHandles) {
-                            if (handle->client() != nullptr) {
-                                return false;
-                            }
-                         }
-                         return true;
-                     }
+    bool isInternal_l() const REQUIRES(audio_utils::EffectBase_Mutex) {
+        for (auto handle : mHandles) {
+            if (handle->client() != nullptr) {
+                return false;
+            }
+        }
+        return true;
+    }
 
     bool             mPinned = false;
 
@@ -150,7 +156,10 @@
     // Audio policy effect state management
     // Mutex protecting transactions with audio policy manager as mutex() cannot
     // be held to avoid cross deadlocks with audio policy mutex
-    audio_utils::mutex& policyMutex() const { return mPolicyMutex; }
+    audio_utils::mutex& policyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_PolicyMutex) {
+        return mPolicyMutex;
+    }
     mutable audio_utils::mutex mPolicyMutex{audio_utils::MutexOrder::kEffectBase_PolicyMutex};
     // Effect is registered in APM or not
     bool                      mPolicyRegistered = false;
@@ -175,25 +184,23 @@
                     int id,
                     audio_session_t sessionId,
                     bool pinned,
-                    audio_port_handle_t deviceId);
-    ~EffectModule() override;
+                    audio_port_handle_t deviceId) REQUIRES(audio_utils::EffectChain_Mutex);
+    ~EffectModule() override REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void process() final;
-    bool updateState() final;
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    void process() final EXCLUDES_EffectBase_Mutex;
+    bool updateState_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_EffectBase_Mutex;
 
-    void reset_l() final;
-    status_t configure() final;
-    status_t init() final;
+    void reset_l() final REQUIRES(audio_utils::EffectBase_Mutex);
+    status_t configure_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    status_t init_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
     uint32_t status() const final {
         return mStatus;
     }
     bool isProcessEnabled() const final;
-    bool isOffloadedOrDirect() const final;
-    bool isVolumeControlEnabled() const final;
+    bool isOffloadedOrDirect_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
+    bool isVolumeControlEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final;
     int16_t *inBuffer() const final {
         return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
@@ -203,34 +210,42 @@
         return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
     }
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    void updateAccessMode() final {
-                    if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
-                        configure();
-                    }
-                }
-    status_t setDevices(const AudioDeviceTypeAddrVector &devices) final;
-    status_t setInputDevice(const AudioDeviceTypeAddr &device) final;
-    status_t setVolume(uint32_t *left, uint32_t *right, bool controller) final;
-    status_t setMode(audio_mode_t mode) final;
-    status_t setAudioSource(audio_source_t source) final;
-    status_t start() final;
-    status_t stop() final;
+    void updateAccessMode_l() final REQUIRES(audio_utils::EffectChain_Mutex) {
+        if (requiredEffectBufferAccessMode() != mConfig.outputCfg.accessMode) {
+            configure_l();
+        }
+    }
+    status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
+    status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
+    status_t setVolume(uint32_t *left, uint32_t *right, bool controller, bool force) final;
+    status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
+    status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
+    status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t stop_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t setOffloaded(bool offloaded, audio_io_handle_t io) final;
-    bool isOffloaded() const final;
-    void addEffectToHal_l() final;
-    void release_l() final;
+    status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    bool isOffloaded_l() const final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void release_l(const std::string& from = "") final REQUIRES(audio_utils::EffectChain_Mutex);
 
     sp<IAfEffectModule> asEffectModule() final { return this; }
 
     bool isHapticGenerator() const final;
+    bool isSpatializer() const final;
 
-    status_t setHapticIntensity(int id, os::HapticScale intensity) final;
-    status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) final;
+    status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) final
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
+    status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) final
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
 
-    status_t getConfigs(audio_config_base_t* inputCfg,
-                                audio_config_base_t* outputCfg,
-                                bool* isOutput) const final;
+    status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                          bool* isOutput) const final
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex;
 
     void dump(int fd, const Vector<String16>& args) const final;
 
@@ -241,9 +256,9 @@
 
     DISALLOW_COPY_AND_ASSIGN(EffectModule);
 
-    status_t start_l();
-    status_t stop_l();
-    status_t removeEffectFromHal_l();
+    status_t start_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t stop_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
+    status_t removeEffectFromHal_l() REQUIRES(audio_utils::EffectChain_Mutex);
     status_t sendSetAudioDevicesCommand(const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode);
     effect_buffer_access_e requiredEffectBufferAccessMode() const {
         return mConfig.inputCfg.buffer.raw == mConfig.outputCfg.buffer.raw
@@ -291,6 +306,10 @@
     static constexpr pid_t INVALID_PID = (pid_t)-1;
     // this tid is allowed to call setVolume() without acquiring the mutex.
     pid_t mSetVolumeReentrantTid = INVALID_PID;
+
+    std::optional<std::vector<uint32_t>> mVolume;
+    // TODO: b/315995877, remove this debugging string once the root cause is found
+    std::string mEffectInterfaceDebug;
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -366,7 +385,9 @@
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    audio_utils::mutex& mutex() const { return mMutex; }
+    audio_utils::mutex& mutex() const RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
+        return mMutex;
+    }
     // protects IEffect method calls
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
@@ -397,36 +418,49 @@
 // it also provide it's own input buffer used by the track as accumulation buffer.
 class EffectChain : public IAfEffectChain {
 public:
-    EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId);
+    EffectChain(const sp<IAfThreadBase>& thread,
+                audio_session_t sessionId,
+                const sp<IAfThreadCallback>& afThreadCallback);
 
-    void process_l() final;
+    void process_l() final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    audio_utils::mutex& mutex() const final { return mMutex; }
+    audio_utils::mutex& mutex() const final RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) {
+        return mMutex;
+    }
 
-    status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) final;
-    status_t addEffect_l(const sp<IAfEffectModule>& handle) final;
-    status_t addEffect_ll(const sp<IAfEffectModule>& handle) final;
-    size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final;
+    status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                            audio_session_t sessionId, bool pinned) final
+            EXCLUDES_EffectChain_Mutex;
+    status_t addEffect(const sp<IAfEffectModule>& handle) final
+            EXCLUDES_EffectChain_Mutex;
+    status_t addEffect_l(const sp<IAfEffectModule>& handle) final
+            REQUIRES(audio_utils::EffectChain_Mutex);
+    size_t removeEffect(const sp<IAfEffectModule>& handle, bool release = false) final
+            EXCLUDES_EffectChain_Mutex;
 
     audio_session_t sessionId() const final { return mSessionId; }
     void setSessionId(audio_session_t sessionId) final { mSessionId = sessionId; }
 
-    sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const final;
-    sp<IAfEffectModule> getEffectFromId_l(int id) const final;
-    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const final;
-    std::vector<int> getEffectIds() const final;
+    sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const final
+            EXCLUDES_EffectChain_Mutex;
+    sp<IAfEffectModule> getEffectFromId_l(int id) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    std::vector<int> getEffectIds_l() const final REQUIRES(audio_utils::ThreadBase_Mutex);
     // FIXME use float to improve the dynamic range
 
-    bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) final;
-    void resetVolume_l() final;
-    void setDevices_l(const AudioDeviceTypeAddrVector &devices) final;
-    void setInputDevice_l(const AudioDeviceTypeAddr &device) final;
-    void setMode_l(audio_mode_t mode) final;
-    void setAudioSource_l(audio_source_t source) final;
+    bool setVolume(uint32_t* left, uint32_t* right,
+                   bool force = false) final EXCLUDES_EffectChain_Mutex;
+    void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void setDevices_l(const AudioDeviceTypeAddrVector& devices) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setInputDevice_l(const AudioDeviceTypeAddr& device) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setMode_l(audio_mode_t mode) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+    void setAudioSource_l(audio_source_t source) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final {
         mInBuffer = buffer;
@@ -457,21 +491,22 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    void setEffectSuspended_l(const effect_uuid_t *type,
-                              bool suspend) final;
+    void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // suspend all eligible effects
-    void setEffectSuspendedAll_l(bool suspend) final;
+    void setEffectSuspendedAll_l(bool suspend) final REQUIRES(audio_utils::ThreadBase_Mutex);
     // check if effects should be suspended or restored when a given effect is enable or disabled
-    void checkSuspendOnEffectEnabled(
-            const sp<IAfEffectModule>& effect, bool enabled) final;
+    void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled) final
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer() final;
+    void clearInputBuffer() final EXCLUDES_EffectChain_Mutex;
 
     // At least one non offloadable effect in the chain is enabled
-    bool isNonOffloadableEnabled() const final;
-    bool isNonOffloadableEnabled_l() const final;
+    bool isNonOffloadableEnabled() const final EXCLUDES_EffectChain_Mutex;
+    bool isNonOffloadableEnabled_l() const final REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void syncHalEffectsState() final;
+    void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex final;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     void checkOutputFlagCompatibility(audio_output_flags_t *flags) const final;
@@ -490,32 +525,46 @@
 
     // isCompatibleWithThread_l() must be called with thread->mutex() held
     bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
-            REQUIRES(audio_utils::ThreadBase_Mutex);
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
-    // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
-    bool containsHapticGeneratingEffect_l() final;
+    bool containsHapticGeneratingEffect() final
+            EXCLUDES_EffectChain_Mutex;
 
-    void setHapticIntensity_l(int id, os::HapticScale intensity) final;
+    bool containsHapticGeneratingEffect_l() final
+            REQUIRES(audio_utils::EffectChain_Mutex);
+
+    void setHapticScale_l(int id, os::HapticScale hapticScale) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
 
     sp<EffectCallbackInterface> effectCallback() const final { return mEffectCallback; }
 
     wp<IAfThreadBase> thread() const final { return mEffectCallback->thread(); }
 
-    bool isFirstEffect(int id) const final {
+    bool isFirstEffect_l(int id) const final REQUIRES(audio_utils::EffectChain_Mutex) {
         return !mEffects.isEmpty() && id == mEffects[0]->id();
     }
 
     void dump(int fd, const Vector<String16>& args) const final;
 
-    size_t numberOfEffects() const final { return mEffects.size(); }
+    size_t numberOfEffects() const final {
+        audio_utils::lock_guard _l(mutex());
+        return mEffects.size();
+    }
 
     sp<IAfEffectModule> getEffectModule(size_t index) const final {
+        audio_utils::lock_guard _l(mutex());
         return mEffects[index];
     }
 
-    void setThread(const sp<IAfThreadBase>& thread) final;
+    void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata)
+            final REQUIRES(audio_utils::ThreadBase_Mutex);
 
-private:
+    void setThread(const sp<IAfThreadBase>& thread) final EXCLUDES_EffectChain_Mutex;
+
+  private:
+    bool setVolume_l(uint32_t* left, uint32_t* right, bool force = false)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // For transaction consistency, please consider holding the EffectChain lock before
     // calling the EffectChain::EffectCallback methods, excepting
@@ -528,11 +577,13 @@
         // Note: ctors taking a weak pointer to their owner must not promote it
         // during construction (but may keep a reference for later promotion).
         EffectCallback(const wp<EffectChain>& owner,
-                const sp<IAfThreadBase>& thread)  // we take a sp<> but store a wp<>.
+                const sp<IAfThreadBase>& thread,
+                const sp<IAfThreadCallback>& afThreadCallback)  // we take a sp<> but store a wp<>.
             : mChain(owner)
-            , mThread(thread) {
-            mThreadType = thread->type();
-            mAfThreadCallback = thread->afThreadCallback();
+            , mThread(thread), mAfThreadCallback(afThreadCallback) {
+            if (thread != nullptr) {
+                mThreadType = thread->type();
+            }
         }
 
         status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -562,9 +613,10 @@
         void setVolumeForOutput(float left, float right) const override;
 
         // check if effects should be suspended/restored when a given effect is enable/disabled
-        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect,
-                              bool enabled, bool threadLocked) override;
-        void resetVolume() override;
+        void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect, bool enabled,
+                                         bool threadLocked) override;
+        void resetVolume_l() override
+                REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
         product_strategy_t strategy() const override;
         int32_t activeTrackCnt() const override;
         void onEffectEnable(const sp<IAfEffectBase>& effect) override;
@@ -573,6 +625,9 @@
         wp<IAfEffectChain> chain() const final { return mChain; }
 
         bool isAudioPolicyReady() const final {
+            if (mAfThreadCallback == nullptr) {
+                return false;
+            }
             return mAfThreadCallback->isAudioPolicyReady();
         }
 
@@ -580,15 +635,19 @@
 
         void setThread(const sp<IAfThreadBase>& thread) {
             mThread = thread;
-            mThreadType = thread->type();
-            mAfThreadCallback = thread->afThreadCallback();
+            if (thread != nullptr) {
+                mThreadType = thread->type();
+                mAfThreadCallback = thread->afThreadCallback();
+            }
         }
-
+        bool hasThreadAttached() const {
+            return thread().promote() != nullptr;
+        }
     private:
         const wp<IAfEffectChain> mChain;
         mediautils::atomic_wp<IAfThreadBase> mThread;
         sp<IAfThreadCallback> mAfThreadCallback;
-        IAfThreadBase::type_t mThreadType;
+        IAfThreadBase::type_t mThreadType = IAfThreadBase::MIXER;
     };
 
     DISALLOW_COPY_AND_ASSIGN(EffectChain);
@@ -604,31 +663,37 @@
 
     // get a list of effect modules to suspend when an effect of the type
     // passed is enabled.
-    void  getSuspendEligibleEffects(Vector<sp<IAfEffectModule>> &effects);
+    void getSuspendEligibleEffects(Vector<sp<IAfEffectModule>>& effects)
+            EXCLUDES_EffectChain_Mutex;
 
     // get an effect module if it is currently enable
-    sp<IAfEffectModule> getEffectIfEnabled(const effect_uuid_t *type);
+    sp<IAfEffectModule> getEffectIfEnabled_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
     // true if the effect whose descriptor is passed can be suspended
     // OEMs can modify the rules implemented in this method to exclude specific effect
     // types or implementations from the suspend/restore mechanism.
     bool isEffectEligibleForSuspend(const effect_descriptor_t& desc);
 
-    static bool isEffectEligibleForBtNrecSuspend(const effect_uuid_t *type);
+    static bool isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type)
+            REQUIRES(audio_utils::ThreadBase_Mutex);
 
-    void clearInputBuffer_l();
+    void clearInputBuffer_l() REQUIRES(audio_utils::EffectChain_Mutex);
 
     // true if any effect module within the chain has volume control
-    bool hasVolumeControlEnabled_l() const;
+    bool hasVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex);
 
-    void setVolumeForOutput_l(uint32_t left, uint32_t right);
+    void setVolumeForOutput_l(uint32_t left, uint32_t right)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
-    ssize_t getInsertIndex(const effect_descriptor_t& desc);
+    ssize_t getInsertIndex_l(const effect_descriptor_t& desc)
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
-    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const;
+    std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const
+            REQUIRES(audio_utils::EffectChain_Mutex);
 
     // mutex protecting effect list
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectChain_Mutex};
-             Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
+             Vector<sp<IAfEffectModule>> mEffects  GUARDED_BY(mutex()); // list of effect modules
              audio_session_t mSessionId; // audio session ID
              sp<EffectBufferHalInterface> mInBuffer;  // chain input buffer
              sp<EffectBufferHalInterface> mOutBuffer; // chain output buffer
@@ -668,11 +733,11 @@
     status_t setEnabled(bool enabled, bool fromHandle) final;
     sp<IAfDeviceEffectProxy> asDeviceEffectProxy() final { return this; }
 
-    status_t init(const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) final;
+    status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches) final
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex;
 
     status_t onCreatePatch(audio_patch_handle_t patchHandle,
-            const IAfPatchPanel::Patch& patch) final;
+                           const IAfPatchPanel::Patch& patch) final;
 
     status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
            const IAfPatchPanel::Patch& patch) final;
@@ -690,10 +755,8 @@
     audio_channel_mask_t channelMask() const final;
     uint32_t channelCount() const final;
 
-    status_t command(int32_t cmdCode,
-                     const std::vector<uint8_t>& cmdData,
-                     int32_t maxReplySize,
-                     std::vector<uint8_t>* reply) final;
+    status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final EXCLUDES_DeviceEffectProxy_ProxyMutex;
 
     void dump2(int fd, int spaces) const final;
 
@@ -739,7 +802,7 @@
 
         void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
-        void resetVolume() override {}
+        void resetVolume_l() override REQUIRES(audio_utils::EffectChain_Mutex) {}
         product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
         int32_t activeTrackCnt() const override { return 0; }
         void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override;
@@ -759,13 +822,16 @@
     };
 
     status_t checkPort(const IAfPatchPanel::Patch& patch,
-            const struct audio_port_config *port, sp<IAfEffectHandle> *handle);
+            const struct audio_port_config* port, sp<IAfEffectHandle>* handle);
 
     const AudioDeviceTypeAddr mDevice;
     const sp<DeviceEffectManagerCallback> mManagerCallback;
     const sp<ProxyCallback> mMyCallback;
 
-    audio_utils::mutex& proxyMutex() const { return mProxyMutex; }
+    audio_utils::mutex& proxyMutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectProxy_ProxyMutex) {
+        return mProxyMutex;
+    }
     mutable audio_utils::mutex mProxyMutex{
             audio_utils::MutexOrder::kDeviceEffectProxy_ProxyMutex};
     std::map<audio_patch_handle_t, sp<IAfEffectHandle>> mEffectHandles; // protected by mProxyMutex
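
The header changes lean on Clang's -Wthread-safety annotations: mutex() accessors gain RETURN_CAPABILITY(...), data such as mEffects becomes GUARDED_BY(mutex()), "_l" methods are marked REQUIRES(...), and their locking wrappers are marked with the EXCLUDES_* macros. The compile-only sketch below shows how those annotations fit together using the raw Clang attributes rather than the audio_utils macro wrappers; the Mutex/Chain/mCount names are illustrative and not part of this patch.

    // Compile-only sketch: clang++ -std=c++17 -c -Wthread-safety sketch.cpp
    #define CAPABILITY(x)        __attribute__((capability(x)))
    #define GUARDED_BY(x)        __attribute__((guarded_by(x)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)        __attribute__((locks_excluded(__VA_ARGS__)))
    #define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))
    #define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE();      // interface only; definitions would live elsewhere
        void unlock() RELEASE();
    };

    class Chain {
    public:
        // RETURN_CAPABILITY lets GUARDED_BY(mutex()) and REQUIRES(mMutex) name the same lock.
        Mutex& mutex() const RETURN_CAPABILITY(mMutex) { return mMutex; }

        // Wrapper: must be called with the lock not held; takes it, then calls the _l variant.
        size_t numberOfEffects() const EXCLUDES(mMutex) {
            mutex().lock();
            const size_t n = numberOfEffects_l();
            mutex().unlock();
            return n;
        }

        // _l variant: the analysis rejects callers that do not hold mutex().
        size_t numberOfEffects_l() const REQUIRES(mMutex) { return mCount; }

    private:
        mutable Mutex mMutex;
        size_t mCount GUARDED_BY(mutex()) = 0;
    };

The intent is that a call to an "_l" method from a path that does not hold the required mutex becomes a compile-time -Wthread-safety warning instead of a latent data race.
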
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 8c5bc4b..b9bb18c 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -80,7 +80,7 @@
     // Methods usually implemented with help from EffectChain: pay attention to mutex locking order
     virtual product_strategy_t strategy() const = 0;
     virtual int32_t activeTrackCnt() const = 0;
-    virtual void resetVolume() = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual wp<IAfEffectChain> chain() const = 0;
     virtual bool isAudioPolicyReady() const = 0;
 };
@@ -106,43 +106,45 @@
     virtual bool isOffloadable() const = 0;
     virtual bool isImplementationSoftware() const = 0;
     virtual bool isProcessImplemented() const = 0;
-    virtual bool isVolumeControl() const = 0;
+    virtual bool isVolumeControl() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual bool isVolumeMonitor() const = 0;
     virtual bool isEnabled() const = 0;
     virtual bool isPinned() const = 0;
     virtual void unPin() = 0;
-    virtual status_t updatePolicyState() = 0;
-    virtual bool purgeHandles() = 0;
+    virtual status_t updatePolicyState() EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool purgeHandles() EXCLUDES_EffectBase_Mutex = 0;
     virtual void checkSuspendOnEffectEnabled(bool enabled, bool threadLocked) = 0;
 
     // mCallback is atomic so this can be lock-free.
     virtual void setCallback(const sp<EffectCallbackInterface>& callback) = 0;
     virtual sp<EffectCallbackInterface> getCallback() const = 0;
 
-    virtual status_t addHandle(IAfEffectHandle *handle) = 0;
-    virtual ssize_t removeHandle(IAfEffectHandle *handle) = 0;
+    virtual status_t addHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle(IAfEffectHandle* handle) EXCLUDES_EffectBase_Mutex = 0;
 
     virtual sp<IAfEffectModule> asEffectModule() = 0;
     virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
 
-    virtual status_t command(int32_t cmdCode,
-            const std::vector<uint8_t>& cmdData,
-            int32_t maxReplySize,
-            std::vector<uint8_t>* reply) = 0;
+    virtual status_t command(int32_t cmdCode, const std::vector<uint8_t>& cmdData,
+                             int32_t maxReplySize, std::vector<uint8_t>* reply)
+            EXCLUDES(audio_utils::EffectBase_Mutex) = 0;
 
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 
 private:
-    virtual status_t setEnabled(bool enabled, bool fromHandle) = 0;
-    virtual status_t setEnabled_l(bool enabled) = 0;
-    virtual void setSuspended(bool suspended) = 0;
-    virtual bool suspended() const = 0;
+    virtual status_t setEnabled(bool enabled, bool fromHandle) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setEnabled_l(bool enabled) REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual void setSuspended(bool suspended) EXCLUDES_EffectBase_Mutex = 0;
+    virtual bool suspended() const EXCLUDES_EffectBase_Mutex = 0;
 
-    virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
-    virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
-    virtual IAfEffectHandle* controlHandle_l() = 0;
+    virtual ssize_t disconnectHandle(IAfEffectHandle* handle,
+                                     bool unpinIfLast) EXCLUDES_EffectBase_Mutex = 0;
+    virtual ssize_t removeHandle_l(IAfEffectHandle* handle)
+            REQUIRES(audio_utils::EffectBase_Mutex) = 0;
+    virtual IAfEffectHandle* controlHandle_l() REQUIRES(audio_utils::EffectBase_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectBase_Mutex) = 0;
 };
 
 class IAfEffectModule : public virtual IAfEffectBase {
@@ -161,44 +163,55 @@
     virtual int16_t *inBuffer() const = 0;
     virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
     virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
-    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller) = 0;
-    virtual status_t setOffloaded(bool offloaded, audio_io_handle_t io) = 0;
-    virtual bool isOffloaded() const = 0;
+    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller,
+                               bool force = false) = 0;
+    virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
+    virtual bool isOffloaded_l() const = 0;
 
     virtual status_t setAudioSource(audio_source_t source) = 0;
     virtual status_t setMode(audio_mode_t mode) = 0;
 
-    virtual status_t start() = 0;
-    virtual status_t getConfigs(audio_config_base_t* inputCfg,
-            audio_config_base_t* outputCfg,
-            bool* isOutput) const = 0;
+    virtual status_t start_l() = 0;
+    virtual status_t getConfigs_l(audio_config_base_t* inputCfg, audio_config_base_t* outputCfg,
+                                  bool* isOutput) const
+            REQUIRES(audio_utils::EffectHandle_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
     static bool isHapticGenerator(const effect_uuid_t* type);
     virtual bool isHapticGenerator() const = 0;
-    virtual status_t setHapticIntensity(int id, os::HapticScale intensity) = 0;
-    virtual status_t setVibratorInfo(const media::AudioVibratorInfo& vibratorInfo) = 0;
+    static bool isSpatializer(const effect_uuid_t* type);
+    virtual bool isSpatializer() const = 0;
+
+    virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo)
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
+            REQUIRES(audio_utils::ThreadBase_Mutex,
+                     audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
 private:
     virtual void process() = 0;
-    virtual bool updateState() = 0;
-    virtual void reset_l() = 0;
-    virtual status_t configure() = 0;
-    virtual status_t init() = 0;
+    virtual bool updateState_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual status_t init_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual uint32_t status() const = 0;
     virtual bool isProcessEnabled() const = 0;
-    virtual bool isOffloadedOrDirect() const = 0;
-    virtual bool isVolumeControlEnabled() const = 0;
+    virtual bool isOffloadedOrDirect_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual bool isVolumeControlEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual int16_t *outBuffer() const = 0;
 
     // Updates the access mode if it is out of date.  May issue a new effect configure.
-    virtual void updateAccessMode() = 0;
+    virtual void updateAccessMode_l() = 0;
 
-    virtual status_t stop() = 0;
+    virtual status_t stop_l() = 0;
     virtual void addEffectToHal_l() = 0;
-    virtual void release_l() = 0;
+    virtual void release_l(const std::string& from) = 0;
 };
 
 class IAfEffectChain : public RefBase {
@@ -206,7 +219,8 @@
 public:
     static sp<IAfEffectChain> create(
             const sp<IAfThreadBase>& thread,
-            audio_session_t sessionId);
+            audio_session_t sessionId,
+            const sp<IAfThreadCallback>& afThreadCallback);
 
     // special key used for an entry in mSuspendedEffects keyed vector
     // corresponding to a suspend all request.
@@ -216,33 +230,42 @@
     // a session is stopped or removed to allow effect tail to be rendered
     static constexpr int kProcessTailDurationMs = 1000;
 
-    virtual void process_l() = 0;
+    virtual void process_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual audio_utils::mutex& mutex() const = 0;
+    virtual audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual status_t createEffect_l(sp<IAfEffectModule>& effect,
-                            effect_descriptor_t *desc,
-                            int id,
-                            audio_session_t sessionId,
-                            bool pinned) = 0;
+    virtual status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+                                    audio_session_t sessionId, bool pinned)
+            EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle) = 0;
-    virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle) = 0;
-    virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) = 0;
+    virtual status_t addEffect(const sp<IAfEffectModule>& handle)
+            EXCLUDES_EffectChain_Mutex = 0;
+    virtual status_t addEffect_l(const sp<IAfEffectModule>& handle)
+            REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual size_t removeEffect(const sp<IAfEffectModule>& handle,
+                                  bool release = false) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual audio_session_t sessionId() const = 0;
     virtual void setSessionId(audio_session_t sessionId) = 0;
 
-    virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t *descriptor) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const = 0;
-    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t *type) const = 0;
-    virtual std::vector<int> getEffectIds() const = 0;
-    virtual bool setVolume_l(uint32_t *left, uint32_t *right, bool force = false) = 0;
-    virtual void resetVolume_l() = 0;
-    virtual void setDevices_l(const AudioDeviceTypeAddrVector &devices) = 0;
-    virtual void setInputDevice_l(const AudioDeviceTypeAddr &device) = 0;
-    virtual void setMode_l(audio_mode_t mode) = 0;
-    virtual void setAudioSource_l(audio_source_t source) = 0;
+    virtual sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const
+            EXCLUDES_EffectChain_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffectFromId_l(int id) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual std::vector<int> getEffectIds_l() const = 0;
+    virtual bool setVolume(uint32_t* left, uint32_t* right,
+                           bool force = false) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+    virtual void setDevices_l(const AudioDeviceTypeAddrVector& devices)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setInputDevice_l(const AudioDeviceTypeAddr& device)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setMode_l(audio_mode_t mode)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+    virtual void setAudioSource_l(audio_source_t source)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
     virtual float *inBuffer() const = 0;
@@ -262,20 +285,21 @@
 
     // suspend or restore effects of the specified type. The number of suspend requests is counted
     // and restore occurs once all suspend requests are cancelled.
-    virtual void setEffectSuspended_l(
-            const effect_uuid_t *type, bool suspend) = 0;
+    virtual void setEffectSuspended_l(const effect_uuid_t* type, bool suspend) = 0;
     // suspend all eligible effects
     virtual void setEffectSuspendedAll_l(bool suspend) = 0;
     // check if effects should be suspended or restored when a given effect is enable or disabled
-    virtual void checkSuspendOnEffectEnabled(const sp<IAfEffectModule>& effect, bool enabled) = 0;
+    virtual void checkSuspendOnEffectEnabled_l(const sp<IAfEffectModule>& effect, bool enabled)
+            REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
 
-    virtual void clearInputBuffer() = 0;
+    virtual void clearInputBuffer() EXCLUDES_EffectChain_Mutex = 0;
 
     // At least one non offloadable effect in the chain is enabled
-    virtual bool isNonOffloadableEnabled() const = 0;
-    virtual bool isNonOffloadableEnabled_l() const = 0;
+    virtual bool isNonOffloadableEnabled() const EXCLUDES_EffectChain_Mutex = 0;
+    virtual bool isNonOffloadableEnabled_l() const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
-    virtual void syncHalEffectsState() = 0;
+    virtual void syncHalEffectsState_l()
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     // flags is an ORed set of audio_output_flags_t which is updated on return.
     virtual void checkOutputFlagCompatibility(audio_output_flags_t *flags) const = 0;
@@ -293,22 +317,32 @@
     virtual bool isBitPerfectCompatible() const = 0;
 
     // isCompatibleWithThread_l() must be called with thread->mLock held
-    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const = 0;
+    virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual bool containsHapticGeneratingEffect_l() = 0;
+    virtual bool containsHapticGeneratingEffect()
+            EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual void setHapticIntensity_l(int id, os::HapticScale intensity) = 0;
+    virtual bool containsHapticGeneratingEffect_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+
+    virtual void setHapticScale_l(int id, os::HapticScale hapticScale)
+            REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
 
     virtual sp<EffectCallbackInterface> effectCallback() const = 0;
 
     virtual wp<IAfThreadBase> thread() const = 0;
-    virtual void setThread(const sp<IAfThreadBase>& thread) = 0;
+    virtual void setThread(const sp<IAfThreadBase>& thread) EXCLUDES_EffectChain_Mutex = 0;
 
-    virtual bool isFirstEffect(int id) const = 0;
+    virtual bool isFirstEffect_l(int id) const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
 
     virtual size_t numberOfEffects() const = 0;
     virtual sp<IAfEffectModule> getEffectModule(size_t index) const = 0;
 
+    // sendMetadata_l() must be called with thread->mLock held
+    virtual void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
+        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata) = 0;
+
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 };
 
@@ -352,9 +386,8 @@
                 const sp<DeviceEffectManagerCallback>& callback,
                 effect_descriptor_t *desc, int id, bool notifyFramesProcessed);
 
-    virtual status_t init(
-            const std::map<audio_patch_handle_t,
-            IAfPatchPanel::Patch>& patches) = 0;
+    virtual status_t init_l(const std::map<audio_patch_handle_t, IAfPatchPanel::Patch>& patches)
+            REQUIRES(audio_utils::DeviceEffectManager_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual const AudioDeviceTypeAddr& device() const = 0;
 
     virtual status_t onCreatePatch(
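The REQUIRES/EXCLUDES/RETURN_CAPABILITY annotations added throughout IAfEffect.h are Clang thread-safety-analysis attributes wrapped in audio_utils macros; the "_l" suffix marks methods whose caller must already hold the named mutex. The sketch below is illustrative only: it uses the raw attribute spellings and invented class names in place of the real audio_utils::mutex types, and assumes compilation with -Wthread-safety.

    // Minimal sketch of the annotation pattern (not the AudioFlinger types).
    #include <mutex>

    #define CAPABILITY(x)        __attribute__((capability(x)))
    #define ACQUIRE(...)         __attribute__((acquire_capability(__VA_ARGS__)))
    #define RELEASE(...)         __attribute__((release_capability(__VA_ARGS__)))
    #define REQUIRES(...)        __attribute__((requires_capability(__VA_ARGS__)))
    #define EXCLUDES(...)        __attribute__((locks_excluded(__VA_ARGS__)))
    #define RETURN_CAPABILITY(x) __attribute__((lock_returned(x)))

    class CAPABILITY("mutex") Mutex {
    public:
        void lock() ACQUIRE() { mStd.lock(); }
        void unlock() RELEASE() { mStd.unlock(); }
    private:
        std::mutex mStd;
    };

    class EffectLike {
    public:
        // "_l" convention: the caller must already hold mutex().
        void reset_l() REQUIRES(mMutex) { /* touch state guarded by mMutex */ }

        // Public entry point: the caller must NOT hold mutex(); it is taken here.
        void reset() EXCLUDES(mMutex) {
            mMutex.lock();
            reset_l();
            mMutex.unlock();
        }

        // Tells the analysis which capability the returned reference stands for.
        Mutex& mutex() RETURN_CAPABILITY(mMutex) { return mMutex; }

    private:
        Mutex mMutex;
    };

With these annotations, calling reset_l() without holding the mutex, or calling reset() while already holding it, is flagged at compile time rather than discovered at runtime.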
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 46a67e8..4b6ab89 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -95,7 +95,8 @@
     virtual bool updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
             EXCLUDES_AudioFlinger_Mutex = 0;
     virtual status_t moveEffectChain_ll(audio_session_t sessionId,
-            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread)
+            IAfPlaybackThread* srcThread, IAfPlaybackThread* dstThread,
+            IAfEffectChain* srcChain = nullptr)
             REQUIRES(mutex(), audio_utils::ThreadBase_Mutex) = 0;
 
     virtual void requestLogMerge() = 0;
@@ -279,7 +280,7 @@
     // integrity of the chains during the process.
     // Also sets the parameter 'effectChains' to current value of mEffectChains.
     virtual void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
-            REQUIRES(mutex()) = 0;
+            REQUIRES(mutex()) EXCLUDES_EffectChain_Mutex = 0;
     // unlock effect chains after process
     virtual void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
             EXCLUDES_ThreadBase_Mutex = 0;
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 2302e13..8ed44c6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -18,6 +18,7 @@
 
 #include <android/media/BnAudioRecord.h>
 #include <android/media/BnAudioTrack.h>
+#include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
 #include <fastpath/FastMixerDumpState.h>
@@ -338,12 +339,12 @@
     /** Set haptic playback of the track is enabled or not, should be
      * set after query or get callback from vibrator service */
     virtual void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) = 0;
-    /** Return at what intensity to play haptics, used in mixer. */
-    virtual os::HapticScale getHapticIntensity() const = 0;
+    /** Return the haptics scale, used in mixer. */
+    virtual os::HapticScale getHapticScale() const = 0;
     /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     virtual float getHapticMaxAmplitude() const = 0;
-    /** Set intensity of haptic playback, should be set after querying vibrator service. */
-    virtual void setHapticIntensity(os::HapticScale hapticIntensity) = 0;
+    /** Set scale for haptic playback, should be set after querying vibrator service. */
+    virtual void setHapticScale(os::HapticScale hapticScale) = 0;
     /** Set maximum amplitude allowed for haptic data, should be set after querying
      *  vibrator service.
      */
@@ -351,7 +352,8 @@
     virtual sp<os::ExternalVibration> getExternalVibration() const = 0;
 
     // This function should be called with the thread lock held.
-    virtual void updateTeePatches_l() = 0;
+    virtual void updateTeePatches_l() REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex = 0;
 
     // Argument teePatchesToUpdate is by value, use std::move to optimize.
     virtual void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) = 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 41c5096..1d38306 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -307,6 +307,22 @@
 
 }
 
+void MelReporter::applyAllAudioPatches() {
+    ALOGV("%s", __func__);
+
+    std::vector<IAfPatchPanel::Patch> patchesCopy;
+    {
+        audio_utils::lock_guard _laf(mAfMelReporterCallback->mutex());
+        for (const auto& patch : mAfPatchPanel->patches_l()) {
+            patchesCopy.emplace_back(patch.second);
+        }
+    }
+
+    for (const auto& patch : patchesCopy) {
+        onCreateAudioPatch(patch.mHalHandle, patch);
+    }
+}
+
 std::optional<audio_patch_handle_t> MelReporter::activePatchStreamHandle_l(
         audio_io_handle_t streamHandle) {
     for(const auto& patchIt : mActiveMelPatches) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 235dd11..0aeb225 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -27,8 +27,6 @@
 
 namespace android {
 
-constexpr static int kMaxTimestampDeltaInSec = 120;
-
 class IAfMelReporterCallback : public virtual RefBase {
 public:
     virtual audio_utils::mutex& mutex() const
@@ -45,8 +43,10 @@
 class MelReporter : public PatchCommandThread::PatchCommandListener,
                     public IMelReporterCallback {
 public:
-    explicit MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback)
-        : mAfMelReporterCallback(afMelReporterCallback) {}
+    MelReporter(const sp<IAfMelReporterCallback>& afMelReporterCallback,
+                const sp<IAfPatchPanel>& afPatchPanel)
+        : mAfMelReporterCallback(afMelReporterCallback),
+          mAfPatchPanel(afPatchPanel) {}
 
     void onFirstRef() override;
 
@@ -80,9 +80,10 @@
 
     // IMelReporterCallback methods
     void stopMelComputationForDeviceId(audio_port_handle_t deviceId) final
-            EXCLUDES_MelReporter_Mutex;
+            EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
     void startMelComputationForDeviceId(audio_port_handle_t deviceId) final
-            EXCLUDES_MelReporter_Mutex;
+            EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
+    void applyAllAudioPatches() final EXCLUDES_AudioFlinger_Mutex EXCLUDES_MelReporter_Mutex;
 
     // PatchCommandListener methods
     void onCreateAudioPatch(audio_patch_handle_t handle,
@@ -131,6 +132,7 @@
     bool useHalSoundDoseInterface_l() REQUIRES(mutex());
 
     const sp<IAfMelReporterCallback> mAfMelReporterCallback;
+    const sp<IAfPatchPanel> mAfPatchPanel;
 
     /* const */ sp<SoundDoseManager> mSoundDoseManager;  // set onFirstRef
 
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index b4cb805..6c22e21 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -174,15 +174,15 @@
     void setHapticPlaybackEnabled(bool hapticPlaybackEnabled) final {
                 mHapticPlaybackEnabled = hapticPlaybackEnabled;
             }
-            /** Return at what intensity to play haptics, used in mixer. */
-    os::HapticScale getHapticIntensity() const final { return mHapticIntensity; }
+            /** Return the haptics scale, used in mixer. */
+    os::HapticScale getHapticScale() const final { return mHapticScale; }
             /** Return the maximum amplitude allowed for haptics data, used in mixer. */
     float getHapticMaxAmplitude() const final { return mHapticMaxAmplitude; }
             /** Set intensity of haptic playback, should be set after querying vibrator service. */
-    void setHapticIntensity(os::HapticScale hapticIntensity) final {
-                if (os::isValidHapticScale(hapticIntensity)) {
-                    mHapticIntensity = hapticIntensity;
-                    setHapticPlaybackEnabled(mHapticIntensity != os::HapticScale::MUTE);
+    void setHapticScale(os::HapticScale hapticScale) final {
+                if (os::isValidHapticScale(hapticScale)) {
+                    mHapticScale = hapticScale;
+                    setHapticPlaybackEnabled(!mHapticScale.isScaleMute());
                 }
             }
             /** Set maximum amplitude allowed for haptic data, should be set after querying
@@ -194,7 +194,8 @@
     sp<os::ExternalVibration> getExternalVibration() const final { return mExternalVibration; }
 
             // This function should be called with the thread lock held.
-    void updateTeePatches_l() final;
+    void updateTeePatches_l() final REQUIRES(audio_utils::ThreadBase_Mutex)
+            EXCLUDES_BELOW_ThreadBase_Mutex;
     void setTeePatchesToUpdate_l(TeePatches teePatchesToUpdate) final;
 
     void tallyUnderrunFrames(size_t frames) final {
@@ -328,8 +329,8 @@
     sp<OpPlayAudioMonitor>  mOpPlayAudioMonitor;
 
     bool                mHapticPlaybackEnabled = false; // indicates haptic playback enabled or not
-    // intensity to play haptic data
-    os::HapticScale mHapticIntensity = os::HapticScale::MUTE;
+    // scale to play haptic data
+    os::HapticScale mHapticScale = os::HapticScale::mute();
     // max amplitude allowed for haptic data
     float mHapticMaxAmplitude = NAN;
     class AudioVibrationController : public os::BnExternalVibrationController {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1d7c356..bcd8b99 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -47,6 +47,7 @@
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/PersistableBundle.h>
+#include <com_android_media_audio.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <fastpath/AutoPark.h>
@@ -222,6 +223,8 @@
 static const int kPriorityAudioApp = 2;
 static const int kPriorityFastMixer = 3;
 static const int kPriorityFastCapture = 3;
+// Request real-time priority for PlaybackThread in ARC
+static const int kPriorityPlaybackThreadArc = 1;
 
 // IAudioFlinger::createTrack() has an in/out parameter 'pFrameCount' for the total size of the
 // track buffer in shared memory.  Zero on input means to use a default value.  For fast tracks,
@@ -691,6 +694,10 @@
     }
     // When Thread::requestExitAndWait is made virtual and this method is renamed to
     // "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
+
+    // For TimeCheck: track waiting on the thread join of getTid().
+    audio_utils::mutex::scoped_join_wait_check sjw(getTid());
+
     requestExitAndWait();
 }
 
@@ -1484,7 +1491,7 @@
         return BAD_VALUE;
     }
 
-    if (memcmp(&desc->type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0
+    if (IAfEffectModule::isSpatializer(&desc->type)
             && mType != SPATIALIZER) {
         ALOGW("%s: attempt to create a spatializer effect on a thread of type %d",
                 __func__, mType);
@@ -1572,7 +1579,7 @@
             return BAD_VALUE;
         } else if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
             // only post processing , downmixer or spatializer effects on output stage session
-            if (memcmp(&desc->type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0
+            if (IAfEffectModule::isSpatializer(&desc->type)
                     || memcmp(&desc->type, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) {
                 break;
             }
@@ -1659,12 +1666,12 @@
         if (chain == 0) {
             // create a new chain for this session
             ALOGV("createEffect_l() new effect chain for session %d", sessionId);
-            chain = IAfEffectChain::create(this, sessionId);
+            chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
             addEffectChain_l(chain);
             chain->setStrategy(getStrategyForSession_l(sessionId));
             chainCreated = true;
         } else {
-            effect = chain->getEffectFromDesc_l(desc);
+            effect = chain->getEffectFromDesc(desc);
         }
 
         ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get());
@@ -1672,7 +1679,7 @@
         if (effect == 0) {
             effectId = mAfThreadCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
             // create a new effect module if none present in the chain
-            lStatus = chain->createEffect_l(effect, desc, effectId, sessionId, pinned);
+            lStatus = chain->createEffect(effect, desc, effectId, sessionId, pinned);
             if (lStatus != NO_ERROR) {
                 goto Exit;
             }
@@ -1690,8 +1697,9 @@
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
                     std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
             if (defaultVibratorInfo) {
+                audio_utils::lock_guard _cl(chain->mutex());
                 // Only set the vibrator info when it is a valid one.
-                effect->setVibratorInfo(*defaultVibratorInfo);
+                effect->setVibratorInfo_l(*defaultVibratorInfo);
             }
         }
         // create effect handle and connect it to effect module
@@ -1711,7 +1719,7 @@
     if (!probe && lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
         audio_utils::lock_guard _l(mutex());
         if (effectCreated) {
-            chain->removeEffect_l(effect);
+            chain->removeEffect(effect);
         }
         if (chainCreated) {
             removeEffectChain_l(chain);
@@ -1793,7 +1801,7 @@
 std::vector<int> ThreadBase::getEffectIds_l(audio_session_t sessionId) const
 {
     sp<IAfEffectChain> chain = getEffectChain_l(sessionId);
-    return chain != nullptr ? chain->getEffectIds() : std::vector<int>{};
+    return chain != nullptr ? chain->getEffectIds_l() : std::vector<int>{};
 }
 
 // PlaybackThread::addEffect_ll() must be called with AudioFlinger::mutex() and
@@ -1812,7 +1820,7 @@
     if (chain == 0) {
         // create a new chain for this session
         ALOGV("%s: new effect chain for session %d", __func__, sessionId);
-        chain = IAfEffectChain::create(this, sessionId);
+        chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
         addEffectChain_l(chain);
         chain->setStrategy(getStrategyForSession_l(sessionId));
         chainCreated = true;
@@ -1825,9 +1833,9 @@
         return BAD_VALUE;
     }
 
-    effect->setOffloaded(mType == OFFLOAD, mId);
+    effect->setOffloaded_l(mType == OFFLOAD, mId);
 
-    status_t status = chain->addEffect_l(effect);
+    status_t status = chain->addEffect(effect);
     if (status != NO_ERROR) {
         if (chainCreated) {
             removeEffectChain_l(chain);
@@ -1854,7 +1862,7 @@
     sp<IAfEffectChain> chain = effect->getCallback()->chain().promote();
     if (chain != 0) {
         // remove effect chain if removing last effect
-        if (chain->removeEffect_l(effect, release) == 0) {
+        if (chain->removeEffect(effect, release) == 0) {
             removeEffectChain_l(chain);
         }
     } else {
@@ -1862,22 +1870,20 @@
     }
 }
 
-void ThreadBase::lockEffectChains_l(
-        Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
+void ThreadBase::lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::lock()
 {
     effectChains = mEffectChains;
-    for (size_t i = 0; i < mEffectChains.size(); i++) {
-        mEffectChains[i]->mutex().lock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().lock();
     }
 }
 
-void ThreadBase::unlockEffectChains(
-        const Vector<sp<IAfEffectChain>>& effectChains)
-NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
+void ThreadBase::unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains)
+        NO_THREAD_SAFETY_ANALYSIS  // calls EffectChain::unlock()
 {
-    for (size_t i = 0; i < effectChains.size(); i++) {
-        effectChains[i]->mutex().unlock();
+    for (const auto& effectChain : effectChains) {
+        effectChain->mutex().unlock();
     }
 }
 
@@ -2896,11 +2902,11 @@
         sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
         if (mHapticChannelMask != AUDIO_CHANNEL_NONE
                 && ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
             mutex().unlock();
-            const os::HapticScale intensity = afutils::onExternalVibrationStart(
+            const os::HapticScale hapticScale = afutils::onExternalVibrationStart(
                     track->getExternalVibration());
             std::optional<media::AudioVibratorInfo> vibratorInfo;
             {
@@ -2910,7 +2916,7 @@
                 vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
             }
             mutex().lock();
-            track->setHapticIntensity(intensity);
+            track->setHapticScale(hapticScale);
             if (vibratorInfo) {
                 track->setHapticMaxAmplitude(vibratorInfo->maxAmplitude);
             }
@@ -2926,7 +2932,7 @@
 
             // Set haptic intensity for effect
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), intensity);
+                chain->setHapticScale_l(track->id(), hapticScale);
             }
         }
 
@@ -3319,10 +3325,48 @@
         return {}; // nothing to do
     }
     StreamOutHalInterface::SourceMetadata metadata;
-    auto backInserter = std::back_inserter(metadata.tracks);
-    for (const sp<IAfTrack>& track : mActiveTracks) {
-        // No track is invalid as this is called after prepareTrack_l in the same critical section
-        track->copyMetadataTo(backInserter);
+    static const bool stereo_spatialization_property =
+            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+    const bool stereo_spatialization_enabled =
+            stereo_spatialization_property && com_android_media_audio_stereo_spatialization();
+    if (stereo_spatialization_enabled) {
+        std::map<audio_session_t, std::vector<playback_track_metadata_v7_t>> allSessionsMetadata;
+        for (const sp<IAfTrack>& track : mActiveTracks) {
+            std::vector<playback_track_metadata_v7_t>& sessionMetadata =
+                    allSessionsMetadata[track->sessionId()];
+            auto backInserter = std::back_inserter(sessionMetadata);
+            // No track is invalid as this is called after prepareTrack_l in the same
+            // critical section
+            track->copyMetadataTo(backInserter);
+        }
+        std::vector<playback_track_metadata_v7_t> spatializedTracksMetaData;
+        for (const auto& [session, sessionTrackMetadata] : allSessionsMetadata) {
+            metadata.tracks.insert(metadata.tracks.end(),
+                    sessionTrackMetadata.begin(), sessionTrackMetadata.end());
+            if (auto chain = getEffectChain_l(session); chain != nullptr) {
+                chain->sendMetadata_l(sessionTrackMetadata, {});
+            }
+            if ((hasAudioSession_l(session) & IAfThreadBase::SPATIALIZED_SESSION) != 0) {
+                spatializedTracksMetaData.insert(spatializedTracksMetaData.end(),
+                        sessionTrackMetadata.begin(), sessionTrackMetadata.end());
+            }
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, {});
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_STAGE); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, spatializedTracksMetaData);
+        }
+        if (auto chain = getEffectChain_l(AUDIO_SESSION_DEVICE); chain != nullptr) {
+            chain->sendMetadata_l(metadata.tracks, {});
+        }
+    } else {
+        auto backInserter = std::back_inserter(metadata.tracks);
+        for (const sp<IAfTrack>& track : mActiveTracks) {
+            // No track is invalid as this is called after prepareTrack_l in the same
+            // critical section
+            track->copyMetadataTo(backInserter);
+        }
     }
     sendMetadataToBackend_l(metadata);
     MetadataUpdate change;
@@ -3469,7 +3513,7 @@
             char *endptr;
             unsigned long ul = strtoul(value, &endptr, 0);
             if (*endptr == '\0' && ul != 0) {
-                ALOGD("Silence is golden");
+                ALOGW("%s: mute from ro.audio.silent. Silence is golden", __func__);
                 // The setprop command will not allow a property to be changed after
                 // the first time it is set, so we don't have to worry about un-muting.
                 setMasterMute_l(true);
@@ -3917,6 +3961,27 @@
                 stream()->setHalThreadPriority(priorityBoost);
             }
         }
+    } else if (property_get_bool("ro.boot.container", false /* default_value */)) {
+        // In ARC experiments (b/73091832), the latency when using the CFS scheduler at any
+        // priority is too high for PlaybackThread to process audio data in time. We request the
+        // lowest real-time priority, SCHED_FIFO=1, for PlaybackThread in ARC. ro.boot.container
+        // is true only on ARC.
+        const pid_t tid = getTid();
+        if (tid == -1) {
+            ALOGW("%s: Cannot update PlaybackThread priority for ARC, no tid", __func__);
+        } else {
+            const status_t status = requestPriority(getpid(),
+                                                    tid,
+                                                    kPriorityPlaybackThreadArc,
+                                                    false /* isForApp */,
+                                                    true /* asynchronous */);
+            if (status != OK) {
+                ALOGW("%s: Cannot update PlaybackThread priority for ARC, status %d", __func__,
+                        status);
+            } else {
+                stream()->setHalThreadPriority(kPriorityPlaybackThreadArc);
+            }
+        }
     }
 
     Vector<sp<IAfTrack>> tracksToRemove;
@@ -4128,6 +4193,30 @@
 
             metadataUpdate = updateMetadata_l();
 
+            // Acquire a local copy of active tracks with lock (release w/o lock).
+            //
+            // Control methods on the track acquire the ThreadBase lock (e.g. start(),
+            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
+            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
+            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
+
+            setHalLatencyMode_l();
+
+            // updateTeePatches_l will acquire the ThreadBase_Mutex of other threads,
+            // so this is done before we lock our effect chains.
+            for (const auto& track : mActiveTracks) {
+                track->updateTeePatches_l();
+            }
+
+            // signal actual start of output stream when the render position reported by
+            // the kernel starts moving.
+            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
+                    && (mKernelPositionOnStandby
+                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
+                mHalStarted = true;
+                mWaitHalStartCV.notify_all();
+            }
+
             // prevent any changes in effect chain list and in each effect chain
             // during mixing and effect process as the audio buffers could be deleted
             // or modified if an effect is created or deleted
@@ -4155,28 +4244,6 @@
                     }
                 }
             }
-
-            // Acquire a local copy of active tracks with lock (release w/o lock).
-            //
-            // Control methods on the track acquire the ThreadBase lock (e.g. start()
-            // stop(), pause(), etc.), but the threadLoop is entitled to call audio
-            // data / buffer methods on tracks from activeTracks without the ThreadBase lock.
-            activeTracks.insert(activeTracks.end(), mActiveTracks.begin(), mActiveTracks.end());
-
-            setHalLatencyMode_l();
-
-            for (const auto &track : mActiveTracks ) {
-                track->updateTeePatches_l();
-            }
-
-            // signal actual start of output stream when the render position reported by the kernel
-            // starts moving.
-            if (!mHalStarted && ((isSuspended() && (mBytesWritten != 0)) || (!mStandby
-                    && (mKernelPositionOnStandby
-                            != mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL])))) {
-                mHalStarted = true;
-                mWaitHalStartCV.notify_all();
-            }
         } // mutex() scope ends
 
         if (mBytesRemaining == 0) {
@@ -4738,7 +4805,7 @@
         }
         if (mHapticChannelCount > 0 &&
                 ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
-                        || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+                        || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
             mutex().unlock();
             // Unlock due to VibratorService will lock for this call and will
             // call Tracks.mute/unmute which also require thread's lock.
@@ -4748,7 +4815,7 @@
             // When the track is stopped, set the haptic intensity to MUTE
             // for the HapticGenerator effect.
             if (chain != nullptr) {
-                chain->setHapticIntensity_l(track->id(), os::HapticScale::MUTE);
+                chain->setHapticScale_l(track->id(), os::HapticScale::mute());
             }
         }
 
@@ -5108,7 +5175,7 @@
                                                     // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticIntensity = os::HapticScale::NONE;
+        fastTrack->mHapticScale = {/*level=*/os::HapticLevel::NONE };
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
@@ -5467,7 +5534,7 @@
     sp<IAfEffectChain> chain = getEffectChain_l(AUDIO_SESSION_OUTPUT_MIX);
     if (chain != 0) {
         uint32_t v = (uint32_t)(masterVolume * (1 << 24));
-        chain->setVolume_l(&v, &v);
+        chain->setVolume(&v, &v);
         masterVolume = (float)((v + (1 << 23)) >> 24);
         chain.clear();
     }
@@ -5666,7 +5733,7 @@
                     fastTrack->mChannelMask = track->channelMask();
                     fastTrack->mFormat = track->format();
                     fastTrack->mHapticPlaybackEnabled = track->getHapticPlaybackEnabled();
-                    fastTrack->mHapticIntensity = track->getHapticIntensity();
+                    fastTrack->mHapticScale = track->getHapticScale();
                     fastTrack->mHapticMaxAmplitude = track->getHapticMaxAmplitude();
                     fastTrack->mGeneration++;
                     state->mTrackMask |= 1 << j;
@@ -5802,7 +5869,7 @@
 
             mixedTracks++;
 
-            // track->mainBuffer() != mSinkBuffer or mMixerBuffer means
+            // track->mainBuffer() differing from both mSinkBuffer and mMixerBuffer means
             // there is an effect chain connected to the track
             chain.clear();
             if (track->mainBuffer() != mSinkBuffer &&
@@ -5906,7 +5973,7 @@
             track->setFinalVolume(vrf, vlf);
 
             // Delegate volume control to effect in track effect chain if needed
-            if (chain != 0 && chain->setVolume_l(&vl, &vr)) {
+            if (chain != 0 && chain->setVolume(&vl, &vr)) {
                 // Do not ramp volume if volume is controlled by effect
                 param = AudioMixer::VOLUME;
                 // Update remaining floating point volume levels
@@ -6027,10 +6094,11 @@
                 trackId,
                 AudioMixer::TRACK,
                 AudioMixer::HAPTIC_ENABLED, (void *)(uintptr_t)track->getHapticPlaybackEnabled());
+            const os::HapticScale hapticScale = track->getHapticScale();
             mAudioMixer->setParameter(
-                trackId,
-                AudioMixer::TRACK,
-                AudioMixer::HAPTIC_INTENSITY, (void *)(uintptr_t)track->getHapticIntensity());
+                    trackId,
+                    AudioMixer::TRACK,
+                    AudioMixer::HAPTIC_SCALE, (void *)&hapticScale);
             const float hapticMaxAmplitude = track->getHapticMaxAmplitude();
             mAudioMixer->setParameter(
                 trackId,
@@ -6646,8 +6714,8 @@
                 // Convert volumes from float to 8.24
                 uint32_t vl = (uint32_t)(left * (1 << 24));
                 uint32_t vr = (uint32_t)(right * (1 << 24));
-                // Direct/Offload effect chains set output volume in setVolume_l().
-                (void)mEffectChains[0]->setVolume_l(&vl, &vr);
+                // Direct/Offload effect chains set output volume in setVolume().
+                (void)mEffectChains[0]->setVolume(&vl, &vr);
             } else {
                 // otherwise we directly set the volume.
                 setVolumeForOutput_l(left, right);
@@ -7831,6 +7899,11 @@
     if (mSupportedLatencyModes.empty()) {
         return;
     }
+    // Do not update the HAL latency mode if no track is active
+    if (mActiveTracks.isEmpty()) {
+        return;
+    }
+
     audio_latency_mode_t latencyMode = AUDIO_LATENCY_MODE_FREE;
     if (mSupportedLatencyModes.size() == 1) {
         // If the HAL only support one latency mode currently, confirm the choice
@@ -7841,16 +7914,12 @@
         //   (mRequestedLatencyMode = AUDIO_LATENCY_MODE_LOW)
         //      AND
         // - At least one active track is spatialized
-        bool hasSpatializedActiveTrack = false;
         for (const auto& track : mActiveTracks) {
             if (track->isSpatialized()) {
-                hasSpatializedActiveTrack = true;
+                latencyMode = mRequestedLatencyMode;
                 break;
             }
         }
-        if (hasSpatializedActiveTrack && mRequestedLatencyMode == AUDIO_LATENCY_MODE_LOW) {
-            latencyMode = AUDIO_LATENCY_MODE_LOW;
-        }
     }
 
     if (latencyMode != mSetLatencyMode) {
@@ -7864,7 +7933,7 @@
 }
 
 status_t SpatializerThread::setRequestedLatencyMode(audio_latency_mode_t mode) {
-    if (mode != AUDIO_LATENCY_MODE_LOW && mode != AUDIO_LATENCY_MODE_FREE) {
+    if (mode < 0 || mode >= AUDIO_LATENCY_MODE_CNT) {
         return BAD_VALUE;
     }
     audio_utils::lock_guard _l(mutex());
@@ -9715,7 +9784,7 @@
 
     // make sure enabled pre processing effects state is communicated to the HAL as we
     // just moved them to a new input stream.
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
 
@@ -10606,6 +10675,16 @@
         }
     }
 
+    // For mmap streams, once the routing has changed, they will be disconnected. It should be
+    // okay to notify the client early, before the new patch is created.
+    if (mDeviceId != deviceId) {
+        if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+            // The aaudioservice handle the routing changed event asynchronously. In that case,
+            // it is safe to hold the lock here.
+            callback->onRoutingChanged(deviceId);
+        }
+    }
+
     if (mAudioHwDev->supportsAudioPatches()) {
         status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
                                               patch->sinks, handle);
@@ -10631,12 +10710,6 @@
             sendIoConfigEvent_l(AUDIO_INPUT_CONFIG_CHANGED);
             mInDeviceTypeAddr = sourceDeviceTypeAddr;
         }
-        sp<MmapStreamCallback> callback = mCallback.promote();
-        if (mDeviceId != deviceId && callback != 0) {
-            mutex().unlock();
-            callback->onRoutingChanged(deviceId);
-            mutex().lock();
-        }
         mPatch = *patch;
         mDeviceId = deviceId;
     }
@@ -10700,7 +10773,7 @@
     chain->setThread(this);
     chain->setInBuffer(nullptr);
     chain->setOutBuffer(nullptr);
-    chain->syncHalEffectsState();
+    chain->syncHalEffectsState_l();
 
     mEffectChains.add(chain);
     checkSuspendOnAddEffectChain_l(chain);
@@ -10788,22 +10861,19 @@
 
 void MmapThread::checkInvalidTracks_l()
 {
-    sp<MmapStreamCallback> callback;
     for (const sp<IAfMmapTrack>& track : mActiveTracks) {
         if (track->isInvalid()) {
-            callback = mCallback.promote();
-            if (callback == nullptr &&  mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+            if (const sp<MmapStreamCallback> callback = mCallback.promote()) {
+                // The aaudioservice handles the routing changed event asynchronously, so it
+                // is safe to hold the lock here.
+                callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+            } else if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
                 ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
                 mNoCallbackWarningCount++;
             }
             break;
         }
     }
-    if (callback != 0) {
-        mutex().unlock();
-        callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
-        mutex().lock();
-    }
 }
 
 void MmapThread::dumpInternals_l(int fd, const Vector<String16>& /* args */)
@@ -10993,7 +11063,7 @@
         // only one effect chain can be present on DirectOutputThread, so if
         // there is one, the track is connected to it
         if (!mEffectChains.isEmpty()) {
-            mEffectChains[0]->setVolume_l(&vol, &vol);
+            mEffectChains[0]->setVolume(&vol, &vol);
             volume = (float)vol / (1 << 24);
         }
         // Try to use HW volume control and fall back to SW control if not implemented
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index ddf0669..86e1894 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -436,9 +436,11 @@
                 // ThreadBase mutex before processing the mixer and effects. This guarantees the
                 // integrity of the chains during the process.
                 // Also sets the parameter 'effectChains' to current value of mEffectChains.
-    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final REQUIRES(mutex());
+    void lockEffectChains_l(Vector<sp<IAfEffectChain>>& effectChains) final
+            REQUIRES(audio_utils::ThreadBase_Mutex) ACQUIRE(audio_utils::EffectChain_Mutex);
                 // unlock effect chains after process
-    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final;
+    void unlockEffectChains(const Vector<sp<IAfEffectChain>>& effectChains) final
+            RELEASE(audio_utils::EffectChain_Mutex);
                 // get a copy of mEffectChains vector
     Vector<sp<IAfEffectChain>> getEffectChains_l() const final REQUIRES(mutex()) {
         return mEffectChains;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 77abaf6..8fa4b06 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -389,6 +389,7 @@
       mTrack(track)
 {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 TrackHandle::~TrackHandle() {
@@ -1233,7 +1234,7 @@
                 && (state == IDLE || state == STOPPED || state == FLUSHED)) {
             mFrameMap.reset();
 
-            if (!isFastTrack() && (isDirect() || isOffloaded())) {
+            if (!isFastTrack()) {
                 // Start point of track -> sink frame map. If the HAL returns a
                 // frame position smaller than the first written frame in
                 // updateTrackFrameInfo, the timestamp can be interpolated
@@ -2641,6 +2642,7 @@
     mRecordTrack(recordTrack)
 {
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 RecordHandle::~RecordHandle() {
diff --git a/services/audioflinger/afutils/Vibrator.cpp b/services/audioflinger/afutils/Vibrator.cpp
index 25fcc6a..7c99ca9 100644
--- a/services/audioflinger/afutils/Vibrator.cpp
+++ b/services/audioflinger/afutils/Vibrator.cpp
@@ -20,6 +20,7 @@
 
 #include "Vibrator.h"
 
+#include <android/os/ExternalVibrationScale.h>
 #include <android/os/IExternalVibratorService.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -44,12 +45,17 @@
 }
 
 os::HapticScale onExternalVibrationStart(const sp<os::ExternalVibration>& externalVibration) {
+    if (externalVibration->getAudioAttributes().flags & AUDIO_FLAG_MUTE_HAPTIC) {
+        ALOGD("%s, mute haptic according to audio attributes flag", __func__);
+        return os::HapticScale::mute();
+    }
     const sp<os::IExternalVibratorService> evs = getExternalVibratorService();
     if (evs != nullptr) {
-        int32_t ret;
+
+        os::ExternalVibrationScale ret;
         binder::Status status = evs->onExternalVibrationStart(*externalVibration, &ret);
         if (status.isOk()) {
-            ALOGD("%s, start external vibration with intensity as %d", __func__, ret);
+            ALOGD("%s, start external vibration with intensity as %d", __func__, ret.scaleLevel);
             return os::ExternalVibration::externalVibrationScaleToHapticScale(ret);
         }
     }
@@ -57,7 +63,7 @@
             __func__,
             evs == nullptr ? "external vibration service not found"
                            : "error when querying intensity");
-    return os::HapticScale::MUTE;
+    return os::HapticScale::mute();
 }
 
 void onExternalVibrationStop(const sp<os::ExternalVibration>& externalVibration) {
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index 9851f3a..aad538f 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -64,7 +64,7 @@
     int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
     (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
-    if (deltaHalPosition > 0) {
+    if (deltaHalPosition >= 0) {
         mRenderPosition += deltaHalPosition;
     } else if (mExpectRetrograde) {
         mExpectRetrograde = false;
diff --git a/services/audioflinger/fastpath/FastMixer.cpp b/services/audioflinger/fastpath/FastMixer.cpp
index e0a15c1..1d41b3f 100644
--- a/services/audioflinger/fastpath/FastMixer.cpp
+++ b/services/audioflinger/fastpath/FastMixer.cpp
@@ -178,8 +178,8 @@
                 (void *)(uintptr_t)mSinkChannelMask);
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
                 (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
-        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
-                (void *)(uintptr_t)fastTrack->mHapticIntensity);
+        mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_SCALE,
+                (void *)(&(fastTrack->mHapticScale)));
         mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_MAX_AMPLITUDE,
                 (void *)(&(fastTrack->mHapticMaxAmplitude)));
 
diff --git a/services/audioflinger/fastpath/FastMixerState.h b/services/audioflinger/fastpath/FastMixerState.h
index 8ab6d25..0a56f92 100644
--- a/services/audioflinger/fastpath/FastMixerState.h
+++ b/services/audioflinger/fastpath/FastMixerState.h
@@ -54,7 +54,7 @@
     audio_format_t          mFormat = AUDIO_FORMAT_INVALID;         // track format
     int                     mGeneration = 0;     // increment when any field is assigned
     bool                    mHapticPlaybackEnabled = false; // haptic playback is enabled or not
-    os::HapticScale         mHapticIntensity = os::HapticScale::MUTE; // intensity of haptic data
+    os::HapticScale mHapticScale = os::HapticScale::mute(); // scale of haptic data
     float                   mHapticMaxAmplitude = NAN; // max amplitude allowed for haptic data
 };
 
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 1ff08dc..3b764d1 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -21,10 +21,12 @@
 #include "SoundDoseManager.h"
 
 #include "android/media/SoundDoseRecord.h"
+#include <algorithm>
 #include <android-base/stringprintf.h>
-#include <media/AidlConversionCppNdk.h>
 #include <cinttypes>
 #include <ctime>
+#include <functional>
+#include <media/AidlConversionCppNdk.h>
 #include <utils/Log.h>
 
 namespace android {
@@ -46,6 +48,8 @@
     return now_ts.tv_sec;
 }
 
+constexpr float kDefaultRs2LowerBound = 80.f;  // dBA
+
 }  // namespace
 
 sp<audio_utils::MelProcessor> SoundDoseManager::getOrCreateProcessorForDevice(
@@ -53,7 +57,7 @@
         size_t channelCount, audio_format_t format) {
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose.size() > 0 && mEnabledCsd) {
+    if (!mUseFrameworkMel && mHalSoundDose.size() > 0 && mEnabledCsd) {
         ALOGD("%s: using HAL MEL computation, no MelProcessor needed.", __func__);
         return nullptr;
     }
@@ -143,7 +147,7 @@
     ALOGV("%s", __func__);
     const std::lock_guard _l(mLock);
 
-    if (mHalSoundDose.size() > 0) {
+    if (!mUseFrameworkMel && mHalSoundDose.size() > 0) {
         bool success = true;
         for (auto& halSoundDose : mHalSoundDose) {
             // using the HAL sound dose interface
@@ -187,6 +191,21 @@
     }
 }
 
+float SoundDoseManager::getAttenuationForDeviceId(audio_port_handle_t id) const {
+    float attenuation = 0.f;
+
+    const std::lock_guard _l(mLock);
+    const auto deviceTypeIt = mActiveDeviceTypes.find(id);
+    if (deviceTypeIt != mActiveDeviceTypes.end()) {
+        auto attenuationIt = mMelAttenuationDB.find(deviceTypeIt->second);
+        if (attenuationIt != mMelAttenuationDB.end()) {
+            attenuation = attenuationIt->second;
+        }
+    }
+
+    return attenuation;
+}
+
 audio_port_handle_t SoundDoseManager::getIdForAudioDevice(const AudioDevice& audioDevice) const {
     if (isComputeCsdForcedOnAllDevices()) {
         // If CSD is forced on all devices return random port id. Used only in testing.
@@ -212,6 +231,15 @@
         ALOGI("%s: could not find port id for device %s", __func__, adt.toString().c_str());
         return AUDIO_PORT_HANDLE_NONE;
     }
+
+    if (audio_is_ble_out_device(type) || audio_is_a2dp_device(type)) {
+        const auto btDeviceIt = mBluetoothDevicesWithCsd.find(std::make_pair(address, type));
+        if (btDeviceIt == mBluetoothDevicesWithCsd.end() || !btDeviceIt->second) {
+            ALOGI("%s: bt device %s does not support sound dose", __func__,
+                  adt.toString().c_str());
+            return AUDIO_PORT_HANDLE_NONE;
+        }
+    }
     return deviceIt->second;
 }
 
@@ -260,7 +288,11 @@
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
-    soundDoseManager->onMomentaryExposure(in_currentDbA, id);
+
+    float attenuation = soundDoseManager->getAttenuationForDeviceId(id);
+    ALOGV("%s: attenuating received momentary exposure with %f dB", __func__, attenuation);
+    // TODO: remove attenuation when enforcing HAL MELs to always be attenuated
+    soundDoseManager->onMomentaryExposure(in_currentDbA - attenuation, id);
 
     return ndk::ScopedAStatus::ok();
 }
@@ -289,9 +321,10 @@
                 in_audioDevice.address.toString().c_str());
         return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
     }
+
     // TODO: introduce timestamp in onNewMelValues callback
-    soundDoseManager->onNewMelValues(in_melRecord.melValues, 0,
-                                     in_melRecord.melValues.size(), id);
+    soundDoseManager->onNewMelValues(in_melRecord.melValues, 0, in_melRecord.melValues.size(),
+                                     id, /*attenuated=*/false);
 
     return ndk::ScopedAStatus::ok();
 }
@@ -549,9 +582,6 @@
 }
 
 void SoundDoseManager::setUseFrameworkMel(bool useFrameworkMel) {
-    // invalidate any HAL sound dose interface used
-    resetHalSoundDoseInterfaces();
-
     const std::lock_guard _l(mLock);
     mUseFrameworkMel = useFrameworkMel;
 }
@@ -562,8 +592,19 @@
 }
 
 void SoundDoseManager::setComputeCsdOnAllDevices(bool computeCsdOnAllDevices) {
-    const std::lock_guard _l(mLock);
-    mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+    bool changed = false;
+    {
+        const std::lock_guard _l(mLock);
+        if (mHalSoundDose.size() != 0) {
+            // when using the HAL path we cannot force the HAL to deliver values for all devices
+            changed = mUseFrameworkMel != computeCsdOnAllDevices;
+            mUseFrameworkMel = computeCsdOnAllDevices;
+        }
+        mComputeCsdOnAllDevices = computeCsdOnAllDevices;
+    }
+    if (changed && computeCsdOnAllDevices) {
+        mMelReporterCallback->applyAllAudioPatches();
+    }
 }
 
 bool SoundDoseManager::isComputeCsdForcedOnAllDevices() const {
@@ -582,7 +623,7 @@
 
 bool SoundDoseManager::useHalSoundDose() const {
     const std::lock_guard _l(mLock);
-    return mHalSoundDose.size() > 0;
+    return !mUseFrameworkMel && mHalSoundDose.size() > 0;
 }
 
 void SoundDoseManager::resetSoundDose() {
@@ -604,26 +645,68 @@
 }
 
 void SoundDoseManager::onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
-                                      audio_port_handle_t deviceId) const {
+                                      audio_port_handle_t deviceId, bool attenuated) const {
     ALOGV("%s", __func__);
 
-
     sp<media::ISoundDoseCallback> soundDoseCallback;
     std::vector<audio_utils::CsdRecord> records;
     float currentCsd;
+
+    // TODO: delete this case when enforcing HAL MELs to always be attenuated
+    float attenuation = attenuated ? 0.0f : getAttenuationForDeviceId(deviceId);
+
     {
         const std::lock_guard _l(mLock);
         if (!mEnabledCsd) {
             return;
         }
 
-
         const int64_t timestampSec = getMonotonicSecond();
 
-        // only for internal callbacks
-        records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
-                deviceId, std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
-                timestampSec - length));
+        if (attenuated) {
+            records = mMelAggregator->aggregateAndAddNewMelRecord(audio_utils::MelRecord(
+                    deviceId,
+                    std::vector<float>(mels.begin() + offset, mels.begin() + offset + length),
+                    timestampSec - length));
+        } else {
+            ALOGV("%s: attenuating received values with %f dB", __func__, attenuation);
+
+            // Extracting all intervals that contain values >= RS2 low limit (80dBA) after the
+            // attenuation is applied
+            size_t start = offset;
+            size_t stop = offset;
+            for (; stop < mels.size() && stop < offset + length; ++stop) {
+                if (mels[stop] - attenuation < kDefaultRs2LowerBound) {
+                    if (start < stop) {
+                        std::vector<float> attMel(stop-start, -attenuation);
+                        // attMel[i] = mels[i] - attenuation, i in [start, stop)
+                        std::transform(mels.begin() + start, mels.begin() + stop, attMel.begin(),
+                                       attMel.begin(), std::plus<float>());
+                        std::vector<audio_utils::CsdRecord> newRec =
+                                mMelAggregator->aggregateAndAddNewMelRecord(
+                                        audio_utils::MelRecord(deviceId,
+                                                               attMel,
+                                                               timestampSec - length + start -
+                                                               offset));
+                        std::copy(newRec.begin(), newRec.end(), std::back_inserter(records));
+                    }
+                    start = stop+1;
+                }
+            }
+            if (start < stop) {
+                std::vector<float> attMel(stop-start, -attenuation);
+                // attMel[i - start] = mels[i] - attenuation, i in [start, stop)
+                std::transform(mels.begin() + start, mels.begin() + stop, attMel.begin(),
+                               attMel.begin(), std::plus<float>());
+                std::vector<audio_utils::CsdRecord> newRec =
+                        mMelAggregator->aggregateAndAddNewMelRecord(
+                                audio_utils::MelRecord(deviceId,
+                                                       attMel,
+                                                       timestampSec - length + start -
+                                                       offset));
+                std::copy(newRec.begin(), newRec.end(), std::back_inserter(records));
+            }
+        }
 
         currentCsd = mMelAggregator->getCsd();
     }
@@ -658,6 +741,10 @@
         if (!mEnabledCsd) {
             return;
         }
+
+        if (currentMel < mRs2UpperBound) {
+            return;
+        }
     }
 
     auto soundDoseCallback = getSoundDoseCallback();
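
Illustrative sketch (not part of the patch): the unattenuated branch above first applies the
per-device attenuation and then splits the values into contiguous runs that stay at or above
the RS2 lower bound, one MelRecord per run. A minimal standalone restatement of that splitting
step, assuming the 80 dBA kDefaultRs2LowerBound and a hypothetical helper name:

#include <utility>
#include <vector>

// Split attenuated MEL values into contiguous runs >= the RS2 lower bound.
// Each returned vector corresponds to one MelRecord in the code above.
std::vector<std::vector<float>> splitAttenuatedMels(const std::vector<float>& mels,
                                                    float attenuation,
                                                    float rs2LowerBound = 80.f) {
    std::vector<std::vector<float>> runs;
    std::vector<float> current;
    for (float mel : mels) {
        const float attenuated = mel - attenuation;
        if (attenuated >= rs2LowerBound) {
            current.push_back(attenuated);
        } else if (!current.empty()) {
            runs.push_back(std::move(current));
            current.clear();
        }
    }
    if (!current.empty()) {
        runs.push_back(std::move(current));
    }
    return runs;
}

With zero attenuation, the test input {79, 80, 79, 80, 79, 79, 80} yields three one-element
runs, which is what the NewMelValuesUnattenuatedAreSplit test added below expects.
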
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 347eabe..52a3fd6 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -39,6 +39,8 @@
 
     virtual void stopMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
     virtual void startMelComputationForDeviceId(audio_port_handle_t deviceId) = 0;
+
+    virtual void applyAllAudioPatches() = 0;
 };
 
 class SoundDoseManager : public audio_utils::MelProcessor::MelCallback {
@@ -53,6 +55,13 @@
           mMelAggregator(sp<audio_utils::MelAggregator>::make(kCsdWindowSeconds)),
           mRs2UpperBound(kDefaultRs2UpperBound) {};
 
+    // Used only for testing
+    SoundDoseManager(const sp<IMelReporterCallback>& melReporterCallback,
+                     const sp<audio_utils::MelAggregator>& melAggregator)
+            : mMelReporterCallback(melReporterCallback),
+              mMelAggregator(melAggregator),
+              mRs2UpperBound(kDefaultRs2UpperBound) {};
+
     /**
      * \brief Creates or gets the MelProcessor assigned to the streamHandle
      *
@@ -144,7 +153,7 @@
 
     // ------ Override audio_utils::MelProcessor::MelCallback ------
     void onNewMelValues(const std::vector<float>& mels, size_t offset, size_t length,
-                        audio_port_handle_t deviceId) const override;
+                        audio_port_handle_t deviceId, bool attenuated) const override;
 
     void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
 
@@ -205,6 +214,8 @@
 
     sp<media::ISoundDoseCallback> getSoundDoseCallback() const;
 
+    float getAttenuationForDeviceId(audio_port_handle_t id) const;
+
     void updateAttenuation(float attenuationDB, audio_devices_t deviceType);
     void setCsdEnabled(bool enabled);
     void setUseFrameworkMel(bool useFrameworkMel);
diff --git a/services/audioflinger/sounddose/tests/Android.bp b/services/audioflinger/sounddose/tests/Android.bp
index b963d25..fcbebe1 100644
--- a/services/audioflinger/sounddose/tests/Android.bp
+++ b/services/audioflinger/sounddose/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
diff --git a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
index 294080b..e79b05e 100644
--- a/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
+++ b/services/audioflinger/sounddose/tests/sounddosemanager_tests.cpp
@@ -20,6 +20,7 @@
 #include <SoundDoseManager.h>
 
 #include <aidl/android/hardware/audio/core/sounddose/BnSoundDose.h>
+#include <audio_utils/MelAggregator.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
 #include <media/AidlConversionCppNdk.h>
@@ -43,6 +44,15 @@
 public:
     MOCK_METHOD(void, startMelComputationForDeviceId, (audio_port_handle_t), (override));
     MOCK_METHOD(void, stopMelComputationForDeviceId, (audio_port_handle_t), (override));
+    MOCK_METHOD(void, applyAllAudioPatches, (), (override));
+};
+
+class MelAggregatorMock : public audio_utils::MelAggregator {
+public:
+    MelAggregatorMock() : MelAggregator(100) {}
+
+    MOCK_METHOD(std::vector<audio_utils::CsdRecord>, aggregateAndAddNewMelRecord,
+                (const audio_utils::MelRecord&), (override));
 };
 
 constexpr char kPrimaryModule[] = "primary";
@@ -52,7 +62,8 @@
 protected:
     void SetUp() override {
         mMelReporterCallback = sp<MelReporterCallback>::make();
-        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback);
+        mMelAggregator = sp<MelAggregatorMock>::make();
+        mSoundDoseManager = sp<SoundDoseManager>::make(mMelReporterCallback, mMelAggregator);
         mHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
         mSecondaryHalSoundDose = ndk::SharedRefBase::make<HalSoundDoseMock>();
 
@@ -69,6 +80,7 @@
     }
 
     sp<MelReporterCallback> mMelReporterCallback;
+    sp<MelAggregatorMock> mMelAggregator;
     sp<SoundDoseManager> mSoundDoseManager;
     std::shared_ptr<HalSoundDoseMock> mHalSoundDose;
     std::shared_ptr<HalSoundDoseMock> mSecondaryHalSoundDose;
@@ -110,12 +122,33 @@
     EXPECT_NE(processor1, processor2);
 }
 
-TEST_F(SoundDoseManagerTest, NewMelValuesCacheNewRecord) {
-    std::vector<float>mels{1, 1};
+TEST_F(SoundDoseManagerTest, NewMelValuesAttenuatedAggregateMels) {
+    std::vector<float> mels{1.f, 1.f};
 
-    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1);
+    EXPECT_CALL(*mMelAggregator.get(), aggregateAndAddNewMelRecord)
+            .Times(1)
+            .WillOnce([&] (const audio_utils::MelRecord& record) {
+                EXPECT_THAT(record.mels, ::testing::ElementsAreArray(mels));
+                return std::vector<audio_utils::CsdRecord>();
+            });
 
-    EXPECT_EQ(mSoundDoseManager->getCachedMelRecordsSize(), size_t{1});
+    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1,
+                                      /*attenuated=*/true);
+}
+
+TEST_F(SoundDoseManagerTest, NewMelValuesUnattenuatedAreSplit) {
+    std::vector<float> mels{79.f, 80.f, 79.f, 80.f, 79.f, 79.f, 80.f};
+
+    EXPECT_CALL(*mMelAggregator.get(), aggregateAndAddNewMelRecord)
+            .Times(3)
+            .WillRepeatedly([&] (const audio_utils::MelRecord& record) {
+                EXPECT_EQ(record.mels.size(), size_t{1});
+                EXPECT_EQ(record.mels[0], 80.f);
+                return std::vector<audio_utils::CsdRecord>();
+            });
+
+    mSoundDoseManager->onNewMelValues(mels, 0, mels.size(), /*deviceId=*/1,
+            /*attenuated=*/false);
 }
 
 TEST_F(SoundDoseManagerTest, InvalidHalInterfaceIsNotSet) {
diff --git a/services/audioflinger/timing/tests/Android.bp b/services/audioflinger/timing/tests/Android.bp
index 94eaa6a..040a914 100644
--- a/services/audioflinger/timing/tests/Android.bp
+++ b/services/audioflinger/timing/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_base_license"
diff --git a/services/audiopolicy/Android.bp b/services/audiopolicy/Android.bp
index e018dd3..66ba7e2 100644
--- a/services/audiopolicy/Android.bp
+++ b/services/audiopolicy/Android.bp
@@ -1,10 +1,11 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["frameworks_av_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_library_headers {
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index b164159..8f17ffc 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -166,7 +166,8 @@
                                      audio_input_flags_t flags,
                                      audio_port_handle_t *selectedDeviceId,
                                      input_type_t *inputType,
-                                     audio_port_handle_t *portId) = 0;
+                                     audio_port_handle_t *portId,
+                                     uint32_t *virtualDeviceId) = 0;
     // indicates to the audio policy manager that the input starts being used.
     virtual status_t startInput(audio_port_handle_t portId) = 0;
     // indicates to the audio policy manager that the input stops being used.
@@ -269,6 +270,7 @@
 
     virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes) = 0;
     virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes) = 0;
+    virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) = 0;
 
     virtual status_t updatePolicyMix(
         const AudioMix& mix,
@@ -285,7 +287,8 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid) = 0;
+                                      uid_t uid,
+                                      bool internal = false) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
diff --git a/services/audiopolicy/TEST_MAPPING b/services/audiopolicy/TEST_MAPPING
index a2ebb8d..cf1a771 100644
--- a/services/audiopolicy/TEST_MAPPING
+++ b/services/audiopolicy/TEST_MAPPING
@@ -46,6 +46,9 @@
           "include-filter": "com.google.android.gts.audio.AudioPolicyHostTest"
         }
       ]
+    },
+    {
+      "name": "spatializer_tests"
     }
   ]
 }
diff --git a/services/audiopolicy/common/Android.bp b/services/audiopolicy/common/Android.bp
index 91701ad..a699b8b 100644
--- a/services/audiopolicy/common/Android.bp
+++ b/services/audiopolicy/common/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index 8b76842..e8b04ce 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -35,6 +36,7 @@
         "src/TypeConverter.cpp",
     ],
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "libaudioclient_aidl_conversion",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index c8b2962..00958aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -363,7 +363,7 @@
 
     void dump(String8 *dst, int spaces, const char* extraInfo = nullptr) const override;
     virtual DeviceVector devices() const;
-    void setDevices(const DeviceVector &devices) { mDevices = devices; }
+    void setDevices(const DeviceVector &devices);
     bool sharesHwModuleWith(const sp<SwAudioOutputDescriptor>& outputDesc);
     virtual DeviceVector supportedDevices() const;
     virtual bool devicesSupportEncodedFormats(const DeviceTypeSet& deviceTypes);
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index 7119b85..fe90a1e 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -222,7 +222,8 @@
                            const struct audio_port_config &config,
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
-                           VolumeSource volumeSource);
+                           VolumeSource volumeSource,
+                           bool isInternal);
 
     ~SourceClientDescriptor() override = default;
 
@@ -248,6 +249,7 @@
     void setSwOutput(const sp<SwAudioOutputDescriptor>& swOutput, bool closeOutput = false);
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
+    bool isInternal() const override { return mIsInternal; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -268,34 +270,17 @@
      * behavior of AudioDeviceCallback.
      */
     bool mCloseOutput = false;
-};
-
-/**
- * @brief The InternalSourceClientDescriptor class
- * Specialized Client Descriptor for either a raw patch created from @see createAudioPatch API
- * or for internal audio patches managed by APM (e.g. phone call patches).
- * Whatever the bridge created (software or hardware), we need a client to track the activity
- * and manage volumes.
- * The Audio Patch requested sink is expressed as a preferred device which allows to route
- * the SwOutput. Then APM will performs checks on the UID (against UID of Audioserver) of the
- * requester to prevent rerouting SwOutput involved in raw patches.
- */
-class InternalSourceClientDescriptor: public SourceClientDescriptor
-{
-public:
-    InternalSourceClientDescriptor(
-            audio_port_handle_t portId, uid_t uid, audio_attributes_t attributes,
-            const struct audio_port_config &config, const sp<DeviceDescriptor>& srcDevice,
-             const sp<DeviceDescriptor>& sinkDevice,
-            product_strategy_t strategy, VolumeSource volumeSource) :
-        SourceClientDescriptor(
-            portId, uid, attributes, config, srcDevice, AUDIO_STREAM_PATCH, strategy,
-            volumeSource)
-    {
-        setPreferredDeviceId(sinkDevice->getId());
-    }
-    bool isInternal() const override { return true; }
-    ~InternalSourceClientDescriptor() override = default;
+    /**
+     * True for a specialized Client Descriptor: either a raw patch created via the
+     * @see createAudioPatch API or an internal audio patch managed by APM
+     * (e.g. phone call patches).
+     * Whatever bridge is created (software or hardware), we need a client to track the
+     * activity and manage volumes.
+     * The Audio Patch requested sink is expressed as a preferred device, which allows routing
+     * the SwOutput. APM then performs checks on the UID (against the UID of Audioserver) of the
+     * requester to prevent rerouting SwOutputs involved in raw patches.
+     */
+    bool mIsInternal = false;
 };
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index 6c130fd..c502fc2 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -93,6 +93,8 @@
 
     void setEncapsulationInfoFromHal(AudioPolicyClientInterface *clientInterface);
 
+    void setPreferredConfig(const audio_config_base_t * preferredConfig);
+
     void dump(String8 *dst, int spaces, bool verbose = true) const;
 
 private:
@@ -107,6 +109,7 @@
     audio_format_t      mCurrentEncodedFormat;
     bool                mIsDynamic = false;
     std::string         mDeclaredAddress; // Original device address
+    std::optional<audio_config_base_t> mPreferredConfig;
 };
 
 class DeviceVector : public SortedVector<sp<DeviceDescriptor> >
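
Illustrative sketch (not part of the patch): how the new preferred-config hook is meant to be
used. A caller pins a config on the routed device so that toAudioPortConfig() advertises it
through config_mask; the device object and the concrete values below are hypothetical, and the
real caller is SwAudioOutputDescriptor::setDevices() for bit-perfect outputs, shown later in
this patch.

audio_config_base_t preferred = {
    .sample_rate = 48000,
    .channel_mask = AUDIO_CHANNEL_OUT_STEREO,
    .format = AUDIO_FORMAT_PCM_24_BIT_PACKED,
};
device->setPreferredConfig(&preferred);   // pin format/sample rate/channel mask
// ...
device->setPreferredConfig(nullptr);      // clear the pin when the device is unrouted
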
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index c2e4b11..b92cd70 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -32,11 +32,15 @@
 class EffectDescriptor : public RefBase
 {
 public:
-    EffectDescriptor(const effect_descriptor_t *desc, bool isMusicEffect,
-                     int id, audio_io_handle_t io, audio_session_t session) :
-        mId(id), mIo(io), mSession(session), mEnabled(false), mSuspended(false),
-        mIsMusicEffect(isMusicEffect)
-    {
+  EffectDescriptor(const effect_descriptor_t* desc, bool isMusicEffect, int id,
+                   audio_io_handle_t io, audio_session_t session)
+      : mId(id),
+        mIo(io),
+        mIsOrphan(io == AUDIO_IO_HANDLE_NONE),
+        mSession(session),
+        mEnabled(false),
+        mSuspended(false),
+        mIsMusicEffect(isMusicEffect) {
         memcpy (&mDesc, desc, sizeof(effect_descriptor_t));
     }
 
@@ -95,11 +99,22 @@
      * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
      */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
-                                      const effect_uuid_t *effectType = nullptr);
-    bool hasOrphansForSession(audio_session_t sessionId);
+                                      const effect_uuid_t *effectType = nullptr) const;
+    bool hasOrphansForSession(audio_session_t sessionId) const;
     EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
+    /**
+     * @brief Checks if there is at least one orphan effect with given sessionId and effect type
+     * uuid.
+     * @param sessionId Session ID.
+     * @param effectType Effect type UUID, the implementation will be same as hasOrphansForSession
+     * if null.
+     * @return True if there is an orphan effect for given sessionId and type UUID, false otherwise.
+     */
+    bool hasOrphanEffectsForSessionAndType(audio_session_t sessionId,
+                                           const effect_uuid_t* effectType) const;
+
 private:
     status_t setEffectEnabled(const sp<EffectDescriptor> &effectDesc, bool enabled);
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
index f3a9518..688772c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
+++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h
@@ -70,10 +70,17 @@
         return mMixerBehaviors;
     }
 
+    enum CompatibilityScore {
+        NO_MATCH = 0,
+        PARTIAL_MATCH = 1,
+        EXACT_MATCH = 2
+    };
+
     /**
-     * @brief isCompatibleProfile: This method is used for input and direct output,
+     * @brief getCompatibilityScore: This method is used for input and direct output,
      * and is not used for other output.
-     * Checks if the IO profile is compatible with specified parameters.
+     * Returns a compatibility score measuring how well the IO profile matches
+     * the specified parameters.
      * For input, flags is interpreted as audio_input_flags_t.
      * TODO: merge audio_output_flags_t and audio_input_flags_t.
      *
@@ -86,18 +93,18 @@
      * @param updatedChannelMask if non-NULL, it is assigned the actual channel mask
      * @param flags to be checked for compatibility
      * @param exactMatchRequiredForInputFlags true if exact match is required on flags
-     * @return true if the profile is compatible, false otherwise.
+     * @return how compatible the IO profile is with the given parameters.
      */
-    bool isCompatibleProfile(const DeviceVector &devices,
-                             uint32_t samplingRate,
-                             uint32_t *updatedSamplingRate,
-                             audio_format_t format,
-                             audio_format_t *updatedFormat,
-                             audio_channel_mask_t channelMask,
-                             audio_channel_mask_t *updatedChannelMask,
-                             // FIXME parameter type
-                             uint32_t flags,
-                             bool exactMatchRequiredForInputFlags = false) const;
+    CompatibilityScore getCompatibilityScore(const DeviceVector &devices,
+                                             uint32_t samplingRate,
+                                             uint32_t *updatedSamplingRate,
+                                             audio_format_t format,
+                                             audio_format_t *updatedFormat,
+                                             audio_channel_mask_t channelMask,
+                                             audio_channel_mask_t *updatedChannelMask,
+                                             // FIXME parameter type
+                                             uint32_t flags,
+                                             bool exactMatchRequiredForInputFlags = false) const;
 
     /**
      * @brief areAllDevicesSupported: Checks if the given devices are supported by the IO profile.
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 6f71ac5..44f84b9 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -235,7 +235,8 @@
                                                   &deviceType,
                                                   String8(mDevice->address().c_str()),
                                                   source,
-                                                  flags);
+                                                  static_cast<audio_input_flags_t>(
+                                                          flags & mProfile->getFlags()));
     LOG_ALWAYS_FATAL_IF(mDevice->type() != deviceType,
                         "%s openInput returned device %08x when given device %08x",
                         __FUNCTION__, mDevice->type(), deviceType);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index d027564..6537a00 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -778,6 +778,19 @@
     }
 }
 
+void SwAudioOutputDescriptor::setDevices(const android::DeviceVector &devices) {
+    if ((mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        for (auto device : mDevices) {
+            device->setPreferredConfig(nullptr);
+        }
+        auto config = getConfig();
+        for (auto device : devices) {
+            device->setPreferredConfig(&config);
+        }
+    }
+    mDevices = devices;
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index dc0f466..3430f4b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -15,7 +15,7 @@
  */
 
 #define LOG_TAG "APM_AudioPolicyMix"
-// #define LOG_NDEBUG 0
+//#define LOG_NDEBUG 0
 
 #include <algorithm>
 #include <iterator>
@@ -28,6 +28,9 @@
 #include "PolicyAudioPort.h"
 #include "IOProfile.h"
 #include <AudioOutputDescriptor.h>
+#include <android_media_audiopolicy.h>
+
+namespace audiopolicy_flags = android::media::audiopolicy;
 
 namespace android {
 namespace {
@@ -190,6 +193,12 @@
                     mix.mDeviceType, mix.mDeviceAddress.c_str());
             return BAD_VALUE;
         }
+        if (audiopolicy_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGE("registerMix(): same mix already registered - skipping");
+                return BAD_VALUE;
+            }
+        }
     }
     if (!areMixCriteriaConsistent(mix.mCriteria)) {
         ALOGE("registerMix(): Mix contains inconsistent criteria "
@@ -212,12 +221,21 @@
 {
     for (size_t i = 0; i < size(); i++) {
         const sp<AudioPolicyMix>& registeredMix = itemAt(i);
-        if (mix.mDeviceType == registeredMix->mDeviceType
+        if (audiopolicy_flags::audio_mix_ownership()) {
+            if (mix.mToken == registeredMix->mToken) {
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
+        } else {
+            if (mix.mDeviceType == registeredMix->mDeviceType
                 && mix.mDeviceAddress.compare(registeredMix->mDeviceAddress) == 0) {
-            ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
-                    mix.mDeviceType, mix.mDeviceAddress.c_str());
-            removeAt(i);
-            return NO_ERROR;
+                ALOGD("unregisterMix(): removing mix for dev=0x%x addr=%s",
+                      mix.mDeviceType, mix.mDeviceAddress.c_str());
+                removeAt(i);
+                return NO_ERROR;
+            }
         }
     }
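
Illustrative sketch (not part of the patch): the matching rule that unregisterMix() applies
above, restated as a hypothetical helper. Under the audio_mix_ownership() flag a mix is
identified by its owner token (a binder, so equality is a pointer comparison); otherwise the
legacy device type/address pair is used. registerMix() additionally rejects a duplicate token
under the flag.

// Hypothetical helper mirroring the fields used above; not part of the patch.
bool isSameMix(const AudioMix& a, const AudioMix& b, bool ownershipFlagEnabled) {
    if (ownershipFlagEnabled) {
        return a.mToken == b.mToken;
    }
    return a.mDeviceType == b.mDeviceType
            && a.mDeviceAddress.compare(b.mDeviceAddress) == 0;
}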
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 8b6866e..2aee501 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -96,12 +96,12 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource) :
+         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice)
+    mSrcDevice(srcDevice), mIsInternal(isInternal)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index fe25693..9f7b8fc 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -132,6 +132,20 @@
 {
     DeviceDescriptorBase::toAudioPortConfig(dstConfig, srcConfig);
     dstConfig->ext.device.hw_module = getModuleHandle();
+    if (mPreferredConfig.has_value()) {
+        if (mPreferredConfig->format != AUDIO_FORMAT_DEFAULT) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_FORMAT;
+            dstConfig->format = mPreferredConfig->format;
+        }
+        if (mPreferredConfig->sample_rate != 0) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_SAMPLE_RATE;
+            dstConfig->sample_rate = mPreferredConfig->sample_rate;
+        }
+        if (mPreferredConfig->channel_mask != AUDIO_CHANNEL_NONE) {
+            dstConfig->config_mask |= AUDIO_PORT_CONFIG_CHANNEL_MASK;
+            dstConfig->channel_mask = mPreferredConfig->channel_mask;
+        }
+    }
 }
 
 void DeviceDescriptor::toAudioPort(struct audio_port *port) const
@@ -183,6 +197,14 @@
     }
 }
 
+void DeviceDescriptor::setPreferredConfig(const audio_config_base_t* preferredConfig) {
+    if (preferredConfig == nullptr) {
+        mPreferredConfig.reset();
+    } else {
+        mPreferredConfig = *preferredConfig;
+    }
+}
+
 void DeviceDescriptor::dump(String8 *dst, int spaces, bool verbose) const
 {
     String8 extraInfo;
@@ -193,6 +215,13 @@
     std::string descBaseDumpStr;
     DeviceDescriptorBase::dump(&descBaseDumpStr, spaces, extraInfo.c_str(), verbose);
     dst->append(descBaseDumpStr.c_str());
+
+    if (mPreferredConfig.has_value()) {
+        dst->append(base::StringPrintf(
+                "%*sPreferred Config: format=%#x, channelMask=%#x, sampleRate=%u\n",
+                spaces, "", mPreferredConfig.value().format, mPreferredConfig.value().channel_mask,
+                mPreferredConfig.value().sample_rate).c_str());
+    }
 }
 
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index c85df0f..7971b61 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -210,7 +210,7 @@
     }
 }
 
-bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId)
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId) const
 {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
@@ -221,6 +221,22 @@
     return false;
 }
 
+bool EffectDescriptorCollection::hasOrphanEffectsForSessionAndType(
+        audio_session_t sessionId, const effect_uuid_t* effectType) const {
+    if (effectType == nullptr) {
+        return hasOrphansForSession(sessionId);
+    }
+
+    for (size_t i = 0; i < size(); ++i) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mIsOrphan && effect->mSession == sessionId &&
+            memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0) {
+            return true;
+        }
+    }
+    return false;
+}
+
 EffectDescriptorCollection EffectDescriptorCollection::getOrphanEffectsForSession(
         audio_session_t sessionId) const
 {
@@ -235,7 +251,7 @@
 }
 
 audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
-                                                              const effect_uuid_t *effectType)
+                                                              const effect_uuid_t *effectType) const
 {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index c7d2e6b..d9fbd89 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -33,17 +33,17 @@
     }
 }
 
-bool IOProfile::isCompatibleProfile(const DeviceVector &devices,
-                                    uint32_t samplingRate,
-                                    uint32_t *updatedSamplingRate,
-                                    audio_format_t format,
-                                    audio_format_t *updatedFormat,
-                                    audio_channel_mask_t channelMask,
-                                    audio_channel_mask_t *updatedChannelMask,
-                                    // FIXME type punning here
-                                    uint32_t flags,
-                                    bool exactMatchRequiredForInputFlags) const
-{
+IOProfile::CompatibilityScore IOProfile::getCompatibilityScore(
+        const android::DeviceVector &devices,
+        uint32_t samplingRate,
+        uint32_t *updatedSamplingRate,
+        audio_format_t format,
+        audio_format_t *updatedFormat,
+        audio_channel_mask_t channelMask,
+        audio_channel_mask_t *updatedChannelMask,
+        // FIXME type punning here
+        uint32_t flags,
+        bool exactMatchRequiredForInputFlags) const {
     const bool isPlaybackThread =
             getType() == AUDIO_PORT_TYPE_MIX && getRole() == AUDIO_PORT_ROLE_SOURCE;
     const bool isRecordThread =
@@ -51,13 +51,13 @@
     ALOG_ASSERT(isPlaybackThread != isRecordThread);
     if (!areAllDevicesSupported(devices) ||
             !isCompatibleProfileForFlags(flags, exactMatchRequiredForInputFlags)) {
-        return false;
+        return NO_MATCH;
     }
 
     if (!audio_is_valid_format(format) ||
             (isPlaybackThread && (samplingRate == 0 || !audio_is_output_channel(channelMask))) ||
             (isRecordThread && (!audio_is_input_channel(channelMask)))) {
-         return false;
+         return NO_MATCH;
     }
 
     audio_format_t myUpdatedFormat = format;
@@ -69,32 +69,40 @@
         .channel_mask = channelMask,
         .format = format,
     };
+    auto result = NO_MATCH;
     if (isRecordThread)
     {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
             if (checkExactAudioProfile(&config) != NO_ERROR) {
-                return false;
+                return result;
             }
-        } else if (checkExactAudioProfile(&config) != NO_ERROR && checkCompatibleAudioProfile(
-                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) != NO_ERROR) {
-            return false;
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else if (checkCompatibleAudioProfile(
+                myUpdatedSamplingRate, myUpdatedChannelMask, myUpdatedFormat) == NO_ERROR) {
+            result = PARTIAL_MATCH;
+        } else {
+            return result;
         }
     } else {
-        if (checkExactAudioProfile(&config) != NO_ERROR) {
-            return false;
+        if (checkExactAudioProfile(&config) == NO_ERROR) {
+            result = EXACT_MATCH;
+        } else {
+            return result;
         }
     }
 
-    if (updatedSamplingRate != NULL) {
+    if (updatedSamplingRate != nullptr) {
         *updatedSamplingRate = myUpdatedSamplingRate;
     }
-    if (updatedFormat != NULL) {
+    if (updatedFormat != nullptr) {
         *updatedFormat = myUpdatedFormat;
     }
-    if (updatedChannelMask != NULL) {
+    if (updatedChannelMask != nullptr) {
         *updatedChannelMask = myUpdatedChannelMask;
     }
-    return true;
+    return result;
 }
 
 bool IOProfile::areAllDevicesSupported(const DeviceVector &devices) const {
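
Illustrative sketch (not part of the patch): with the boolean isCompatibleProfile() replaced by
a three-level score, a caller can rank candidate profiles instead of taking the first compatible
one. The surrounding variables (candidateProfiles, devices, samplingRate, format, channelMask,
flags) are assumed to be in scope and are not part of the patch:

// Prefer EXACT_MATCH over PARTIAL_MATCH over NO_MATCH; the enum ordering makes a
// plain comparison sufficient.
sp<IOProfile> best;
IOProfile::CompatibilityScore bestScore = IOProfile::NO_MATCH;
for (const auto& profile : candidateProfiles) {
    const auto score = profile->getCompatibilityScore(devices,
            samplingRate, nullptr /*updatedSamplingRate*/,
            format, nullptr /*updatedFormat*/,
            channelMask, nullptr /*updatedChannelMask*/,
            flags);
    if (score > bestScore) {
        bestScore = score;
        best = profile;
    }
}
// bestScore == IOProfile::NO_MATCH means no usable profile was found.

The call sites updated later in this patch (e.g. in AudioPolicyManager.cpp) only need the
NO_MATCH / non-NO_MATCH distinction and keep their original early-continue structure.
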
diff --git a/services/audiopolicy/config/Android.bp b/services/audiopolicy/config/Android.bp
index 321181d..7d529df 100644
--- a/services/audiopolicy/config/Android.bp
+++ b/services/audiopolicy/config/Android.bp
@@ -18,6 +18,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index d7eb2c8..9e07d79 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 05434bc..ab2c134 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
index fd2ded8..2df51d0 100644
--- a/services/audiopolicy/engine/config/tests/Android.bp
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
index 9cee978..99d62a3 100644
--- a/services/audiopolicy/engine/config/tests/resources/Android.bp
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engine/interface/Android.bp b/services/audiopolicy/engine/interface/Android.bp
index 5dd5adb..b1f7666 100644
--- a/services/audiopolicy/engine/interface/Android.bp
+++ b/services/audiopolicy/engine/interface/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index aaf89a0..66df930 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index a733c3f..8dd13e8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
index f0926eb..7e429ef 100644
--- a/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/automotive/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
index 981b5a7..12a554d 100644
--- a/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/caremu/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
index 9f44bd6..b0a4dfd 100644
--- a/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/example/phone/Android.bp
@@ -23,6 +23,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index 98b8e78..7fe111f 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -17,6 +17,7 @@
 // Root soong_namespace for common components
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
index 7329032..42585e9 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Car/Android.bp
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
index 6715e06..efde298 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/CarEmu/Android.bp
@@ -28,6 +28,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
index bd401d0..474094e 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/Phone/Android.bp
@@ -27,6 +27,7 @@
 // Generate Audio Policy Parameter Framework Product Strategies Structure file from template
 //
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
index 7c6fc54..aba9767 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoInput/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
index f1348df..77677a1 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/examples/SettingsNoOutput/Android.bp
@@ -24,6 +24,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index 1495b46..3dc2229 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index 95c2fb6..d1fb2fb 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 770e56c..78d5fa3 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 1563d5f..7810d63 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/enginedefault/config/example/Android.bp b/services/audiopolicy/enginedefault/config/example/Android.bp
index 679b455..31f9a46 100644
--- a/services/audiopolicy/enginedefault/config/example/Android.bp
+++ b/services/audiopolicy/enginedefault/config/example/Android.bp
@@ -20,6 +20,7 @@
 }
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index d276a76..8cee613 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 1227db9..2c85955 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -16,6 +16,10 @@
  *
  ******************************************************************************/
 
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+}
+
 cc_defaults {
     name: "audiopolicy_aidl_fuzzer_defaults",
     shared_libs: [
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 58fcb5c..6416a47 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -207,7 +207,8 @@
                          audio_port_handle_t *selectedDeviceId, audio_format_t format,
                          audio_channel_mask_t channelMask, int sampleRate,
                          audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-                         audio_port_handle_t *portId = nullptr);
+                         audio_port_handle_t *portId = nullptr,
+                         uint32_t *virtualDeviceId = nullptr);
     bool findDevicePort(audio_port_role_t role, audio_devices_t deviceType,
                         const std::string &address, audio_port_v7 *foundPort);
     static audio_port_handle_t getDeviceIdFromPatch(const struct audio_patch *patch);
@@ -283,7 +284,7 @@
 bool AudioPolicyManagerFuzzer::getInputForAttr(
     const audio_attributes_t &attr, audio_unique_id_t riid, audio_port_handle_t *selectedDeviceId,
     audio_format_t format, audio_channel_mask_t channelMask, int sampleRate,
-    audio_input_flags_t flags, audio_port_handle_t *portId) {
+    audio_input_flags_t flags, audio_port_handle_t *portId, uint32_t *virtualDeviceId) {
     audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     config.sample_rate = sampleRate;
@@ -298,7 +299,7 @@
     attributionSource.uid = 0;
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getInputForAttr(&attr, &input, riid, AUDIO_SESSION_NONE, attributionSource,
-            &config, flags, selectedDeviceId, &inputType, portId) != OK) {
+            &config, flags, selectedDeviceId, &inputType, portId, virtualDeviceId) != OK) {
         return false;
     }
     if (*portId == AUDIO_PORT_HANDLE_NONE || input == AUDIO_IO_HANDLE_NONE) {
diff --git a/services/audiopolicy/fuzzer/resources/Android.bp b/services/audiopolicy/fuzzer/resources/Android.bp
index 22ee256..2a2b83b 100644
--- a/services/audiopolicy/fuzzer/resources/Android.bp
+++ b/services/audiopolicy/fuzzer/resources/Android.bp
@@ -17,6 +17,7 @@
  ******************************************************************************/
 
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/managerdefault/Android.bp b/services/audiopolicy/managerdefault/Android.bp
index a3acdc7..e6f6374 100644
--- a/services/audiopolicy/managerdefault/Android.bp
+++ b/services/audiopolicy/managerdefault/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -22,6 +23,7 @@
     export_include_dirs: ["."],
 
     shared_libs: [
+        "com.android.media.audio-aconfig-cc",
         "libaudiofoundation",
         "libaudiopolicy",
         "libaudiopolicycomponents",
@@ -39,6 +41,9 @@
         // a dependency on it in the device makefile. There will be no build time
         // conflict with libaudiopolicyenginedefault.
         "audioclient-types-aidl-cpp",
+        // Flag support
+        "android.media.audiopolicy-aconfig-cc",
+        "com.android.media.audioserver-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libaudioclient_aidl_conversion",
         "libaudiopolicyenginedefault",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 42afa1e..be8e096 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -41,6 +41,9 @@
 
 #include <Serializer.h>
 #include <android/media/audio/common/AudioPort.h>
+#include <com_android_media_audio.h>
+#include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <media/AudioParameter.h>
@@ -56,6 +59,9 @@
 
 namespace android {
 
+
+namespace audio_flags = android::media::audiopolicy;
+
 using android::media::audio::common::AudioDevice;
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
@@ -781,7 +787,11 @@
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-    mCallRxSourceClient = startAudioSourceInternal(&source, &aa, 0/*uid*/);
+
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
+    mCallRxSourceClient = mAudioSources.valueFor(portId);
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
@@ -814,9 +824,11 @@
 
     struct audio_port_config source = {};
     srcDevice->toAudioPortConfig(&source);
-    mCallTxSourceClient = new InternalSourceClientDescriptor(
-                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, sinkDevice,
-                mCommunnicationStrategy, toVolumeSource(aa));
+    mCallTxSourceClient = new SourceClientDescriptor(
+                callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
+                mCommunnicationStrategy, toVolumeSource(aa), true);
+    mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
+
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
     status_t status = connectAudioSourceToSink(
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
@@ -1043,11 +1055,11 @@
     sp<IOProfile> profile;
     for (const auto& hwModule : hwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-             if (!curProfile->isCompatibleProfile(devices,
+             if (curProfile->getCompatibilityScore(devices,
                      samplingRate, NULL /*updatedSamplingRate*/,
                      format, NULL /*updatedFormat*/,
                      channelMask, NULL /*updatedChannelMask*/,
-                     flags)) {
+                     flags) == IOProfile::NO_MATCH) {
                  continue;
              }
              // reject profiles not corresponding to a device currently available
@@ -1509,11 +1521,30 @@
     }
 
     if (!profile->canOpenNewIo()) {
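+        // With direct track reprioritization enabled, try to free an output on this profile for
+        // the new request instead of failing right away (MMAP outputs are excluded, see below).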
+        if (!com::android::media::audioserver::direct_track_reprioritization()) {
+            return NAME_NOT_FOUND;
+        } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
+            // MMAP gracefully handles lack of an exclusive track resource by mixing
+            // above the audio framework. For AAudio to know that the limit is reached,
+            // return an error.
+            return NAME_NOT_FOUND;
+        } else {
+            // Close outputs on this profile, if available, to free resources for this request
+            for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
+                const auto desc = mOutputs.valueAt(i);
+                if (desc->mProfile == profile) {
+                    closeOutput(desc->mIoHandle);
+                }
+            }
+        }
+    }
+
+    // Could not free resources for this request by closing existing streams; give up
+    if (!profile->canOpenNewIo()) {
         return NAME_NOT_FOUND;
     }
 
-    sp<SwAudioOutputDescriptor> outputDesc =
-            new SwAudioOutputDescriptor(profile, mpClientInterface);
+    auto outputDesc = sp<SwAudioOutputDescriptor>::make(profile, mpClientInterface);
 
     // An MSD patch may be using the only output stream that can service this request. Release
     // all MSD patches to prioritize this request over any active output on MSD.
@@ -1606,9 +1637,13 @@
         *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_ULTRASOUND);
     }
 
+    // Use the spatializer output if the content can be spatialized, no preferred mixer
+    // was specified and offload or direct playback is not explicitly requested.
     *isSpatialized = false;
     if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())) {
+            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
+            && prefMixerConfigInfo == nullptr
+            && ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0)) {
         *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
@@ -2034,6 +2069,8 @@
     // matching criteria values in priority order for best matching output so far
     std::vector<uint32_t> bestMatchCriteria(8, 0);
 
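+    // An orphan (parked) HapticGenerator effect on this session means haptic playback may resume,
+    // so haptic-capable outputs stay eligible even when no haptic channels are requested.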
+    const bool hasOrphanHaptic =
+            mEffects.hasOrphanEffectsForSessionAndType(sessionId, FX_IID_HAPTICGENERATOR);
     const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
     const uint32_t hapticChannelCount = audio_channel_count_from_out_mask(
         channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
@@ -2054,13 +2091,20 @@
         // When using haptic output, same audio format and sample rate are required.
         const uint32_t outputHapticChannelCount = audio_channel_count_from_out_mask(
             outputDesc->getChannelMask() & AUDIO_CHANNEL_HAPTIC_ALL);
-        if ((hapticChannelCount == 0) != (outputHapticChannelCount == 0)) {
+        // Skip if haptic channels are requested but the output does not support them, or the
+        // output supports haptic channels but none are requested AND no orphan haptic effect exists.
+        if ((hapticChannelCount != 0 && outputHapticChannelCount == 0) ||
+            (hapticChannelCount == 0 && outputHapticChannelCount != 0 && !hasOrphanHaptic)) {
             continue;
         }
-        if (outputHapticChannelCount >= hapticChannelCount
-            && format == outputDesc->getFormat()
-            && samplingRate == outputDesc->getSamplingRate()) {
-                currentMatchCriteria[0] = outputHapticChannelCount;
+        // In the case of audio-coupled-haptic playback, there is no format conversion or
+        // resampling in the framework, so the client and the output thread must use the same
+        // format, channel mask and sample rate. In the case of a HapticGenerator effect, format
+        // matching is not required.
+        if ((outputHapticChannelCount >= hapticChannelCount && format == outputDesc->getFormat() &&
+             samplingRate == outputDesc->getSamplingRate()) ||
+            (hapticChannelCount == 0 && hasOrphanHaptic)) {
+            currentMatchCriteria[0] = outputHapticChannelCount;
         }
 
         // functional flags match
@@ -2662,7 +2706,8 @@
                                              audio_input_flags_t flags,
                                              audio_port_handle_t *selectedDeviceId,
                                              input_type_t *inputType,
-                                             audio_port_handle_t *portId)
+                                             audio_port_handle_t *portId,
+                                             uint32_t *virtualDeviceId)
 {
     ALOGV("%s() source %d, sampling rate %d, format %#x, channel mask %#x, session %d, "
           "flags %#x attributes=%s requested device ID %d",
@@ -2764,6 +2809,9 @@
         } else {
             *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE;
         }
+        if (virtualDeviceId) {
+            *virtualDeviceId = policyMix->mVirtualDeviceId;
+        }
     } else {
         if (explicitRoutingDevice != nullptr) {
             device = explicitRoutingDevice;
@@ -2787,6 +2835,10 @@
             // meaning it receives audio injected into the framework, so the recorder doesn't
             // know about it and is therefore considered "legacy"
             *inputType = API_INPUT_LEGACY;
+
+            if (virtualDeviceId) {
+                *virtualDeviceId = policyMix->mVirtualDeviceId;
+            }
         } else if (audio_is_remote_submix_device(device->type())) {
             *inputType = API_INPUT_MIX_CAPTURE;
         } else if (device->type() == AUDIO_DEVICE_IN_TELEPHONY_RX) {
@@ -2818,6 +2870,11 @@
         goto error;
     }
 
+
+    if (policyMix != nullptr && virtualDeviceId != nullptr) {
+        *virtualDeviceId = policyMix->mVirtualDeviceId;
+    }
+
 exit:
 
     *selectedDeviceId = mAvailableInputDevices.contains(device) ?
@@ -3142,7 +3199,12 @@
     ALOGV("%s %d", __FUNCTION__, input);
 
     inputDesc->removeClient(portId);
-    mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
+
+    // If no more clients are present in this session, park its effects on an orphan chain
+    RecordClientVector clientsOnSession = inputDesc->getClientsForSession(client->session());
+    if (clientsOnSession.size() == 0) {
+        mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
+    }
     if (inputDesc->getClientCount() > 0) {
         ALOGV("%s(%d) %zu clients remaining", __func__, portId, inputDesc->getClientCount());
         return;
@@ -3542,7 +3604,7 @@
                                 int session,
                                 int id)
 {
-    if (session != AUDIO_SESSION_DEVICE) {
+    if (session != AUDIO_SESSION_DEVICE && io != AUDIO_IO_HANDLE_NONE) {
         ssize_t index = mOutputs.indexOfKey(io);
         if (index < 0) {
             index = mInputs.indexOfKey(io);
@@ -3643,6 +3705,7 @@
     status_t res = NO_ERROR;
     bool checkOutputs = false;
     sp<HwModule> rSubmixModule;
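+    // Track the mixes actually registered by this call so that only those are rolled back on error.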
+    Vector<AudioMix> registeredMixes;
     // examine each mix's route type
     for (size_t i = 0; i < mixes.size(); i++) {
         AudioMix mix = mixes[i];
@@ -3766,11 +3829,19 @@
                 break;
             } else {
                 checkOutputs = true;
+                registeredMixes.add(mix);
             }
         }
     }
     if (res != NO_ERROR) {
-        unregisterPolicyMixes(mixes);
+        if (audio_flags::audio_mix_ownership()) {
+            // Only unregister mixes that were actually registered to not accidentally unregister
+            // mixes that already existed previously.
+            unregisterPolicyMixes(registeredMixes);
+            registeredMixes.clear();
+        } else {
+            unregisterPolicyMixes(mixes);
+        }
     } else if (checkOutputs) {
         checkForDeviceAndOutputChanges();
         updateCallAndOutputRouting();
@@ -3804,15 +3875,22 @@
                 continue;
             }
 
-            for (auto device : {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
+            for (auto device: {AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_DEVICE_OUT_REMOTE_SUBMIX}) {
                 if (getDeviceConnectionState(device, address.c_str()) ==
-                        AUDIO_POLICY_DEVICE_STATE_AVAILABLE)  {
-                    res = setDeviceConnectionStateInt(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                      address.c_str(), "remote-submix",
-                                                      AUDIO_FORMAT_DEFAULT);
-                    if (res != OK) {
+                    AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+                    status_t currentRes =
+                            setDeviceConnectionStateInt(device,
+                                                        AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                        address.c_str(),
+                                                        "remote-submix",
+                                                        AUDIO_FORMAT_DEFAULT);
+                    if (!audio_flags::audio_mix_ownership()) {
+                        res = currentRes;
+                    }
+                    if (currentRes != OK) {
                         ALOGE("Error making RemoteSubmix device unavailable for mix "
                               "with type %d, address %s", device, address.c_str());
+                        res = INVALID_OPERATION;
                     }
                 }
             }
@@ -3828,6 +3906,7 @@
             }
         }
     }
+
     if (res == NO_ERROR && checkOutputs) {
         checkForDeviceAndOutputChanges();
         updateCallAndOutputRouting();
@@ -3835,6 +3914,26 @@
     return res;
 }
 
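+// Test-only API gated by the audio_mix_test_api flag: returns copies of the currently
+// registered policy mixes.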
+status_t AudioPolicyManager::getRegisteredPolicyMixes(std::vector<AudioMix>& _aidl_return) {
+    if (!audio_flags::audio_mix_test_api()) {
+        return INVALID_OPERATION;
+    }
+
+    _aidl_return.clear();
+    _aidl_return.reserve(mPolicyMixes.size());
+    for (const auto &policyMix: mPolicyMixes) {
+        _aidl_return.emplace_back(policyMix->mCriteria, policyMix->mMixType,
+                             policyMix->mFormat, policyMix->mRouteFlags, policyMix->mDeviceAddress,
+                             policyMix->mCbFlags);
+        _aidl_return.back().mDeviceType = policyMix->mDeviceType;
+        _aidl_return.back().mToken = policyMix->mToken;
+        _aidl_return.back().mVirtualDeviceId = policyMix->mVirtualDeviceId;
+    }
+
+    ALOGVV("%s() returning %zu registered mixes", __func__, _aidl_return.size());
+    return OK;
+}
+
 status_t AudioPolicyManager::updatePolicyMix(
             const AudioMix& mix,
             const std::vector<AudioMixMatchCriterion>& updatedCriteria) {
@@ -4456,11 +4555,11 @@
             outputDevices = getMsdAudioOutDevices();
         }
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
-            if (!curProfile->isCompatibleProfile(outputDevices,
+            if (curProfile->getCompatibilityScore(outputDevices,
                     config->sample_rate, nullptr /*updatedSamplingRate*/,
                     config->format, nullptr /*updatedFormat*/,
                     config->channel_mask, nullptr /*updatedChannelMask*/,
-                    flags)) {
+                    flags) == IOProfile::NO_MATCH) {
                 continue;
             }
             // reject profiles not corresponding to a device currently available
@@ -4566,15 +4665,17 @@
     for (const auto& hwModule : mHwModules) {
         for (const auto& curProfile : hwModule->getOutputProfiles()) {
             if (curProfile->hasDynamicAudioProfile()
-                    && curProfile->isCompatibleProfile(devices,
-                                                       mixerAttributes->config.sample_rate,
-                                                       nullptr /*updatedSamplingRate*/,
-                                                       mixerAttributes->config.format,
-                                                       nullptr /*updatedFormat*/,
-                                                       mixerAttributes->config.channel_mask,
-                                                       nullptr /*updatedChannelMask*/,
-                                                       flags,
-                                                       false /*exactMatchRequiredForInputFlags*/)) {
+                    && curProfile->getCompatibilityScore(
+                            devices,
+                            mixerAttributes->config.sample_rate,
+                            nullptr /*updatedSamplingRate*/,
+                            mixerAttributes->config.format,
+                            nullptr /*updatedFormat*/,
+                            mixerAttributes->config.channel_mask,
+                            nullptr /*updatedChannelMask*/,
+                            flags,
+                            false /*exactMatchRequiredForInputFlags*/)
+                            != IOProfile::NO_MATCH) {
                 profile = curProfile;
                 break;
             }
@@ -4855,9 +4956,11 @@
     audio_attributes_t attributes = attributes_initializer(AUDIO_USAGE_MEDIA);
     const struct audio_port_config *source = &patch->sources[0];
     sp<SourceClientDescriptor> sourceDesc =
-            new InternalSourceClientDescriptor(
-                portId, uid, attributes, *source, srcDevice, sinkDevice,
-                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes));
+            new SourceClientDescriptor(
+                portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
+                mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
+                true);
+    sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
             connectAudioSourceToSink(sourceDesc, sinkDevice, patch, *handle, uid, 0 /* delayMs */);
@@ -4977,14 +5080,15 @@
                 return BAD_VALUE;
             }
 
-            if (!outputDesc->mProfile->isCompatibleProfile(DeviceVector(devDesc),
-                                                           patch->sources[0].sample_rate,
-                                                           NULL,  // updatedSamplingRate
-                                                           patch->sources[0].format,
-                                                           NULL,  // updatedFormat
-                                                           patch->sources[0].channel_mask,
-                                                           NULL,  // updatedChannelMask
-                                                           AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) {
+            if (outputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(devDesc),
+                    patch->sources[0].sample_rate,
+                    nullptr,  // updatedSamplingRate
+                    patch->sources[0].format,
+                    nullptr,  // updatedFormat
+                    patch->sources[0].channel_mask,
+                    nullptr,  // updatedChannelMask
+                    AUDIO_OUTPUT_FLAG_NONE /*FIXME*/) == IOProfile::NO_MATCH) {
                 ALOGV("%s profile not supported for device %08x", __func__, devDesc->type());
                 return INVALID_OPERATION;
             }
@@ -5032,17 +5136,18 @@
                 return BAD_VALUE;
             }
 
-            if (!inputDesc->mProfile->isCompatibleProfile(DeviceVector(device),
-                                                          patch->sinks[0].sample_rate,
-                                                          NULL, /*updatedSampleRate*/
-                                                          patch->sinks[0].format,
-                                                          NULL, /*updatedFormat*/
-                                                          patch->sinks[0].channel_mask,
-                                                          NULL, /*updatedChannelMask*/
-                                                          // FIXME for the parameter type,
-                                                          // and the NONE
-                                                          (audio_output_flags_t)
-                                                            AUDIO_INPUT_FLAG_NONE)) {
+            if (inputDesc->mProfile->getCompatibilityScore(
+                    DeviceVector(device),
+                    patch->sinks[0].sample_rate,
+                    nullptr, /*updatedSampleRate*/
+                    patch->sinks[0].format,
+                    nullptr, /*updatedFormat*/
+                    patch->sinks[0].channel_mask,
+                    nullptr, /*updatedChannelMask*/
+                    // FIXME for the parameter type,
+                    // and the NONE
+                    (audio_output_flags_t)
+                    AUDIO_INPUT_FLAG_NONE) == IOProfile::NO_MATCH) {
                 return INVALID_OPERATION;
             }
             // TODO: reconfigure output format and channels here
@@ -5524,7 +5629,7 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid)
+                                              uid_t uid, bool internal)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5557,7 +5662,7 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes));
+                                   toVolumeSource(*attributes), internal);
 
     status_t status = connectAudioSource(sourceDesc);
     if (status == NO_ERROR) {
@@ -5566,18 +5671,6 @@
     return status;
 }
 
-sp<SourceClientDescriptor> AudioPolicyManager::startAudioSourceInternal(
-        const struct audio_port_config *source, const audio_attributes_t *attributes, uid_t uid)
-{
-    ALOGV("%s", __FUNCTION__);
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-
-    status_t status = startAudioSource(source, attributes, &portId, uid);
-    ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
-    return mAudioSources.valueFor(portId);
-}
-
-
 status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
@@ -5986,15 +6079,26 @@
     // The caller can have the audio config criteria ignored by either passing a null ptr or
     // the AUDIO_CONFIG_INITIALIZER value.
     // If an audio config is specified, current policy is to only allow spatialization for
-    // some positional channel masks and PCM format
+    // some positional channel masks and PCM format and for stereo if low latency performance
+    // mode is not requested.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        if (!audio_is_channel_mask_spatialized(config->channel_mask)) {
+        static const bool stereo_spatialization_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+        const bool channel_mask_spatialized =
+                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
+                ? audio_channel_mask_contains_stereo(config->channel_mask)
+                : audio_is_channel_mask_spatialized(config->channel_mask);
+        if (!channel_mask_spatialized) {
             return false;
         }
         if (!audio_is_linear_pcm(config->format)) {
             return false;
         }
+        if (config->channel_mask == AUDIO_CHANNEL_OUT_STEREO
+                && ((attr->flags & AUDIO_FLAG_LOW_LATENCY) != 0)) {
+            return false;
+        }
     }
 
     sp<IOProfile> profile =
@@ -6770,6 +6874,12 @@
         closingOutput->stop();
     }
     closingOutput->close();
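+    // Bit-perfect outputs set a preferred config on their devices when opened; clear it on close.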
+    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
+            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        for (const auto device : closingOutput->devices()) {
+            device->setPreferredConfig(nullptr);
+        }
+    }
 
     removeOutput(output);
     mPreviousOutputs = mOutputs;
@@ -7659,9 +7769,6 @@
     // Choose an input profile based on the requested capture parameters: select the first available
     // profile supporting all requested parameters.
     // The flags can be ignored if they don't contain a must match flag.
-    //
-    // TODO: perhaps isCompatibleProfile should return a "matching" score so we can return
-    // the best matching profile, not the first one.
 
     using underlying_input_flag_t = std::underlying_type_t<audio_input_flags_t>;
     const underlying_input_flag_t mustMatchFlag = AUDIO_INPUT_FLAG_MMAP_NOIRQ |
@@ -7678,27 +7785,35 @@
             for (const auto& profile : hwModule->getInputProfiles()) {
                 // profile->log();
                 //updatedFormat = format;
-                if (profile->isCompatibleProfile(DeviceVector(device), samplingRate,
-                                                 &samplingRate  /*updatedSamplingRate*/,
-                                                 format,
-                                                 &format,       /*updatedFormat*/
-                                                 channelMask,
-                                                 &channelMask   /*updatedChannelMask*/,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 true /*exactMatchRequiredForInputFlags*/)) {
+                if (profile->getCompatibilityScore(
+                        DeviceVector(device),
+                        samplingRate,
+                        &updatedSamplingRate,
+                        format,
+                        &updatedFormat,
+                        channelMask,
+                        &updatedChannelMask,
+                        // FIXME ugly cast
+                        (audio_output_flags_t) flags,
+                        true /*exactMatchRequiredForInputFlags*/) == IOProfile::EXACT_MATCH) {
+                    samplingRate = updatedSamplingRate;
+                    format = updatedFormat;
+                    channelMask = updatedChannelMask;
                     return profile;
                 }
-                if (firstInexact == nullptr && profile->isCompatibleProfile(DeviceVector(device),
-                                                 samplingRate,
-                                                 &updatedSamplingRate,
-                                                 format,
-                                                 &updatedFormat,
-                                                 channelMask,
-                                                 &updatedChannelMask,
-                                                 // FIXME ugly cast
-                                                 (audio_output_flags_t) flags,
-                                                 false /*exactMatchRequiredForInputFlags*/)) {
+                if (firstInexact == nullptr
+                        && profile->getCompatibilityScore(
+                                DeviceVector(device),
+                                samplingRate,
+                                &updatedSamplingRate,
+                                format,
+                                &updatedFormat,
+                                channelMask,
+                                &updatedChannelMask,
+                                // FIXME ugly cast
+                                (audio_output_flags_t) flags,
+                                false /*exactMatchRequiredForInputFlags*/)
+                                != IOProfile::NO_MATCH) {
                     firstInexact = profile;
                 }
             }
@@ -7728,10 +7843,18 @@
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
                                         VolumeSource volumeSource,
                                         int index,
-                                        const DeviceTypeSet& deviceTypes)
+                                        const DeviceTypeSet& deviceTypes,
+                                        bool computeInternalInteraction)
 {
     float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
 
+    ALOGV("%s volume source %d, index %d, devices %s, compute internal %d", __func__,
+          volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
+
+    if (!computeInternalInteraction) {
+        return volumeDb;
+    }
+
     // handle the case of accessibility active while a ringtone is playing: if the ringtone is much
     // louder than the accessibility prompt, the prompt cannot be heard, thus masking the touch
     // exploration of the dialer UI. In this situation, bring the accessibility volume closer to
@@ -7741,14 +7864,11 @@
     const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC, false);
     const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM, false);
     const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY, false);
-    // Verify that the current volume source is not the ringer volume to prevent recursively
-    // calling to compute volume. This could happen in cases where a11y and ringer sounds belong
-    // to the same volume group.
-    if (volumeSource != ringVolumeSrc && volumeSource == a11yVolumeSrc
-            && (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
+    if (AUDIO_MODE_RINGTONE == mEngine->getPhoneState() &&
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
-        const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes);
+        const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
+                /* computeInternalInteraction= */ false);
         return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
     }
 
@@ -7765,7 +7885,8 @@
         auto &voiceCurves = getVolumeCurves(callVolumeSrc);
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
         const float maxVoiceVolDb =
-                computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes)
+                computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
+                              /* computeInternalInteraction= */ false)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
         // FIXME: Workaround for call screening applications until a proper audio mode is defined
         // to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -7807,12 +7928,8 @@
         // when the phone is ringing we must consider that music could have been paused just before
         // by the music application and behave as if music was active if the last music track was
         // just stopped
-        // Verify that the current volume source is not the music volume to prevent recursively
-        // calling to compute volume. This could happen in cases where music and
-        // (alarm, ring, notification, system, etc.) sounds belong to the same volume group.
-        if (volumeSource != musicVolumeSrc &&
-            (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
-                || mLimitRingtoneVolume)) {
+        if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)
+                || mLimitRingtoneVolume) {
             volumeDb += SONIFICATION_HEADSET_VOLUME_FACTOR_DB;
             DeviceTypeSet musicDevice =
                     mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
@@ -7821,7 +7938,8 @@
             float musicVolDb = computeVolume(musicCurves,
                                              musicVolumeSrc,
                                              musicCurves.getVolumeIndex(musicDevice),
-                                             musicDevice);
+                                             musicDevice,
+                                             /* computeInternalInteraction= */ false);
             float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                         musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
             if (volumeDb > minVolDb) {
@@ -8404,6 +8522,12 @@
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
     }
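+    // For bit-perfect outputs, record the opened output's config as the preferred config of the
+    // routed devices (cleared again when the output is closed).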
+    if ((flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+        auto portConfig = desc->getConfig();
+        for (const auto& device : devices) {
+            device->setPreferredConfig(&portConfig);
+        }
+    }
 
     // Here is where the out_set_parameters() for card & device gets called
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 61be09f..50e8ed8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -141,7 +141,8 @@
                                          audio_input_flags_t flags,
                                          audio_port_handle_t *selectedDeviceId,
                                          input_type_t *inputType,
-                                         audio_port_handle_t *portId);
+                                         audio_port_handle_t *portId,
+                                         uint32_t *virtualDeviceId);
 
         // indicates to the audio policy manager that the input starts being used.
         virtual status_t startInput(audio_port_handle_t portId);
@@ -292,6 +293,7 @@
 
         virtual status_t registerPolicyMixes(const Vector<AudioMix>& mixes);
         virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes);
+        virtual status_t getRegisteredPolicyMixes(std::vector<AudioMix>& mixes) override;
         virtual status_t updatePolicyMix(
                 const AudioMix& mix,
                 const std::vector<AudioMixMatchCriterion>& updatedCriteria) override;
@@ -339,7 +341,8 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid);
+                                          uid_t uid,
+                                          bool internal = false);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -562,12 +565,36 @@
         status_t resetInputDevice(audio_io_handle_t input,
                                   audio_patch_handle_t *patchHandle = NULL);
 
-        // compute the actual volume for a given stream according to the requested index and a particular
-        // device
-        virtual float computeVolume(IVolumeCurves &curves,
-                                    VolumeSource volumeSource,
-                                    int index,
-                                    const DeviceTypeSet& deviceTypes);
+        /**
+         * Compute volume in DB that should be applied for a volume source and device types for a
+         * particular volume index.
+         *
+         * <p><b>Note:</b> Internally the compute method recursively calls itself to accurately
+         * determine the volume given the currently active sources and devices. Some of the
+         * interactions that require recursive computation are:
+         * <ul>
+         * <li>Match accessibility volume if ringtone volume is much louder</li>
+         * <li>If a voice call is active, cap other volumes (except ringtone and accessibility)</li>
+         * <li>Attenuate notification if headset is connected to prevent burst in user's ear</li>
+         * <li>Attenuate ringtone if headset is connected and music is not playing and speaker is
+         *      part of the devices to prevent burst in user's ear</li>
+         * <li>Limit music volume if headset is connected and notification is also active</li>
+         * </ul>
+         *
+         * @param curves volume curves to use for calculating volume value given the index
+         * @param volumeSource source (use case) of the volume
+         * @param index index to match in the volume curves for the calculation
+         * @param deviceTypes devices that should be considered in the volume curves for the
+         *        calculation
+         * @param computeInternalInteraction boolean indicating whether recursive volume computation
+         *        should continue within the volume computation. Defaults to {@code true} so the
+         *        volume interactions can be computed. Calls within the method should always set
+         *        the value to {@code false} to prevent infinite recursion.
+         * @return computed volume in DB
+         */
+        virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
+                               int index, const DeviceTypeSet& deviceTypes,
+                               bool computeInternalInteraction = true);
 
         // rescale volume index from srcStream within range of dstStream
         int rescaleVolumeIndex(int srcIndex,
@@ -1055,9 +1082,6 @@
         bool isMsdPatch(const audio_patch_handle_t &handle) const;
 
 private:
-        sp<SourceClientDescriptor> startAudioSourceInternal(
-                const struct audio_port_config *source, const audio_attributes_t *attributes,
-                uid_t uid);
 
         void onNewAudioModulesAvailableInt(DeviceVector *newDevices);
 
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 1a0bf3c..9b7a470 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -11,6 +12,7 @@
     name: "libaudiopolicyservice_dependencies",
 
     shared_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -106,3 +108,9 @@
         "libsensorprivacy",
     ],
 }
+
+cc_library_headers {
+    name: "libaudiopolicyservice_headers",
+    host_supported: true,
+    export_include_dirs: ["."],
+}
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 6e1ecec..a862037 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -20,6 +20,7 @@
 #include "AudioPolicyService.h"
 #include "AudioRecordClient.h"
 #include "TypeConverter.h"
+#include <android_media_audiopolicy.h>
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioValidator.h>
@@ -45,6 +46,7 @@
 #define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
+namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
 using content::AttributionSourceState;
@@ -62,6 +64,8 @@
 using media::audio::common::AudioUuid;
 using media::audio::common::Int;
 
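+// Virtual device id 0 denotes the default, non-virtual device; the device-aware RECORD_AUDIO
+// check below is only enforced for non-default ids.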
+constexpr int kDefaultVirtualDeviceId = 0;
+
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
     AUDIO_USAGE_EMERGENCY,
@@ -627,6 +631,8 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
             attributionSource)));
 
+    uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
+
     // check calling permissions.
     // Capturing from the following sources does not require permission RECORD_AUDIO
     // as the captured audio does not come from a microphone:
@@ -698,7 +704,8 @@
             status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
                                                           attributionSource, &config,
                                                           flags, &selectedDeviceId,
-                                                          &inputType, &portId);
+                                                          &inputType, &portId,
+                                                          &virtualDeviceId);
 
         }
         audioPolicyEffects = mAudioPolicyEffects;
@@ -737,6 +744,14 @@
                 LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
                         __func__, (int)inputType);
             }
+
+            if (audiopolicy_flags::record_audio_device_aware_permission()) {
+                // enforce device-aware RECORD_AUDIO permission
+                if (virtualDeviceId != kDefaultVirtualDeviceId &&
+                    !recordingAllowed(attributionSource, virtualDeviceId, inputSource)) {
+                    status = PERMISSION_DENIED;
+                }
+            }
         }
 
         if (status != NO_ERROR) {
@@ -752,6 +767,7 @@
 
         sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
                                                              selectedDeviceId, attributionSource,
+                                                             virtualDeviceId,
                                                              canCaptureOutput, canCaptureHotword,
                                                              mOutputCommandThread);
         mAudioRecordClients.add(portId, client);
@@ -807,8 +823,8 @@
     msg << "Audio recording on session " << client->session;
 
     // check calling permissions
-    if (!(startRecording(client->attributionSource, String16(msg.str().c_str()),
-                         client->attributes.source)
+    if (!(startRecording(client->attributionSource, client->virtualDeviceId,
+                         String16(msg.str().c_str()), client->attributes.source)
             || client->attributes.source == AUDIO_SOURCE_FM_TUNER
             || client->attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX
             || client->attributes.source == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -826,7 +842,8 @@
     if (client->active) {
         ALOGE("Client should never be active before startInput. Uid %d port %d",
                 client->attributionSource.uid, portId);
-        finishRecording(client->attributionSource, client->attributes.source);
+        finishRecording(client->attributionSource, client->virtualDeviceId,
+                        client->attributes.source);
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
 
@@ -922,7 +939,8 @@
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
-        finishRecording(client->attributionSource, client->attributes.source);
+        finishRecording(client->attributionSource, client->virtualDeviceId,
+                        client->attributes.source);
     }
 
     return binderStatusFromStatusT(status);
@@ -951,7 +969,7 @@
     updateUidStates_l();
 
     // finish the recording app op
-    finishRecording(client->attributionSource, client->attributes.source);
+    finishRecording(client->attributionSource, client->virtualDeviceId, client->attributes.source);
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
 }
@@ -1561,17 +1579,20 @@
     std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
     unsigned int generation;
 
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
-
     const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
-    numPortsReq = std::min(numPortsReq, num_ports);
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->listAudioPorts makes a deep copy of port structs into ports
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+                mAudioPolicyManager->listAudioPorts(
+                        role, type, &num_ports, ports.get(), &generation)));
+        numPortsReq = std::min(numPortsReq, num_ports);
+    }
 
     if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
         for (size_t i = 0; i < numPortsReq; ++i) {
@@ -1601,15 +1622,19 @@
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
 
     const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->getAudioPort makes a deep copy of the port struct into port
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
+    }
 
     if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
         anonymizePortBluetoothAddress(&port);
@@ -1672,17 +1697,20 @@
     std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
     unsigned int generation;
 
-    audio_utils::lock_guard _l(mMutex);
-    if (mAudioPolicyManager == NULL) {
-        return binderStatusFromStatusT(NO_INIT);
-    }
-
     const AttributionSourceState attributionSource = getCallingAttributionSource();
-
     AutoCallerClear acc;
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-            mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
-    numPatchesReq = std::min(numPatchesReq, num_patches);
+
+    {
+        audio_utils::lock_guard _l(mMutex);
+        if (mAudioPolicyManager == NULL) {
+            return binderStatusFromStatusT(NO_INIT);
+        }
+        // AudioPolicyManager->listAudioPatches makes a deep copy of patches structs into patches
+        // so it is safe to access after releasing the mutex
+        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+                mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+        numPatchesReq = std::min(numPatchesReq, num_patches);
+    }
 
     if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
         for (size_t i = 0; i < numPatchesReq; ++i) {
@@ -1810,6 +1838,23 @@
     }
 }
 
+Status
+AudioPolicyService::getRegisteredPolicyMixes(std::vector<::android::media::AudioMix>* mixesAidl) {
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+
+    std::vector<AudioMix> mixes;
+    int status = mAudioPolicyManager->getRegisteredPolicyMixes(mixes);
+
+    for (const auto& mix : mixes) {
+        media::AudioMix aidlMix = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_AudioMix(mix));
+        mixesAidl->push_back(aidlMix);
+    }
+
+    return binderStatusFromStatusT(status);
+}
+
 Status AudioPolicyService::updatePolicyMixes(
         const ::std::vector<::android::media::AudioMixUpdate>& updates) {
     audio_utils::lock_guard _l(mMutex);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 717640f..3e1245b 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -163,7 +163,8 @@
 BINDER_METHOD_ENTRY(setPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
-
+BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+                                                     \
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
     using Code = int;
@@ -227,6 +228,7 @@
       mDestroyAudioPolicyManager(destroyAudioPolicyManager),
       mUsecaseValidator(media::createUsecaseValidator()) {
       setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+      setInheritRt(true);
 }
 
 void AudioPolicyService::loadAudioPolicyManager()
@@ -1188,12 +1190,13 @@
         if (client->silenced != silenced) {
             if (client->active) {
                 if (silenced) {
-                    finishRecording(client->attributionSource, client->attributes.source);
+                    finishRecording(client->attributionSource, client->virtualDeviceId,
+                                    client->attributes.source);
                 } else {
                     std::stringstream msg;
                     msg << "Audio recording un-silenced on session " << client->session;
-                    if (!startRecording(client->attributionSource, String16(msg.str().c_str()),
-                            client->attributes.source)) {
+                    if (!startRecording(client->attributionSource, client->virtualDeviceId,
+                                        String16(msg.str().c_str()), client->attributes.source)) {
                         silenced = true;
                     }
                 }
@@ -1352,7 +1355,8 @@
         case TRANSACTION_getDevicesForRoleAndCapturePreset:
         case TRANSACTION_getSpatializer:
         case TRANSACTION_setPreferredMixerAttributes:
-        case TRANSACTION_clearPreferredMixerAttributes: {
+        case TRANSACTION_clearPreferredMixerAttributes:
+        case TRANSACTION_getRegisteredPolicyMixes: {
             if (!isServiceUid(IPCThreadState::self()->getCallingUid())) {
                 ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
                       __func__, code, IPCThreadState::self()->getCallingPid(),
@@ -1421,144 +1425,13 @@
     if (in == BAD_TYPE || out == BAD_TYPE || err == BAD_TYPE) {
         return BAD_VALUE;
     }
-    if (args.size() >= 3 && args[0] == String16("set-uid-state")) {
-        return handleSetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("reset-uid-state")) {
-        return handleResetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
-        return handleGetUidState(args, out, err);
-    } else if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
+    if (args.size() >= 1 && args[0] == String16("purge_permission-cache")) {
         purgePermissionCache();
         return NO_ERROR;
-    } else if (args.size() == 1 && args[0] == String16("help")) {
-        printHelp(out);
-        return NO_ERROR;
     }
-    printHelp(err);
     return BAD_VALUE;
 }
 
-static status_t getUidForPackage(String16 packageName, int userId, /*inout*/uid_t& uid, int err) {
-    if (userId < 0) {
-        ALOGE("Invalid user: %d", userId);
-        dprintf(err, "Invalid user: %d\n", userId);
-        return BAD_VALUE;
-    }
-
-    PermissionController pc;
-    uid = pc.getPackageUid(packageName, 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", String8(packageName).c_str());
-        dprintf(err, "Unknown package: '%s'\n", String8(packageName).c_str());
-        return BAD_VALUE;
-    }
-
-    uid = multiuser_get_uid(userId, uid);
-    return NO_ERROR;
-}
-
-status_t AudioPolicyService::handleSetUidState(Vector<String16>& args, int err) {
-    // Valid arg.size() is 3 or 5, args.size() is 5 with --user option.
-    if (!(args.size() == 3 || args.size() == 5)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    bool active = false;
-    if (args[2] == String16("active")) {
-        active = true;
-    } else if ((args[2] != String16("idle"))) {
-        ALOGE("Expected active or idle but got: '%s'", String8(args[2]).c_str());
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 5 && args[3] == String16("--user")) {
-        userId = atoi(String8(args[4]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        uidPolicy->addOverrideUid(uid, active);
-        return NO_ERROR;
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::handleResetUidState(Vector<String16>& args, int err) {
-    // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
-    if (!(args.size() == 2 || args.size() == 4)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        uidPolicy->removeOverrideUid(uid);
-        return NO_ERROR;
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::handleGetUidState(Vector<String16>& args, int out, int err) {
-    // Valid arg.size() is 2 or 4, args.size() is 4 with --user option.
-    if (!(args.size() == 2 || args.size() == 4)) {
-        printHelp(err);
-        return BAD_VALUE;
-    }
-
-    int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
-    }
-
-    uid_t uid;
-    if (getUidForPackage(args[1], userId, uid, err) == BAD_VALUE) {
-        return BAD_VALUE;
-    }
-
-    sp<UidPolicy> uidPolicy;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        uidPolicy = mUidPolicy;
-    }
-    if (uidPolicy) {
-        return dprintf(out, uidPolicy->isUidActive(uid) ? "active\n" : "idle\n");
-    }
-    return NO_INIT;
-}
-
-status_t AudioPolicyService::printHelp(int out) {
-    return dprintf(out, "Audio policy service commands:\n"
-        "  get-uid-state <PACKAGE> [--user USER_ID] gets the uid state\n"
-        "  set-uid-state <PACKAGE> <active|idle> [--user USER_ID] overrides the uid state\n"
-        "  reset-uid-state <PACKAGE> [--user USER_ID] clears the uid state override\n"
-        "  help print this message\n");
-}
-
 status_t AudioPolicyService::registerOutput(audio_io_handle_t output,
                         const audio_config_base_t& config,
                         const audio_output_flags_t flags) {
@@ -1619,10 +1492,6 @@
     checkRegistered();
     {
         audio_utils::lock_guard _l(mMutex);
-        auto overrideIter = mOverrideUids.find(uid);
-        if (overrideIter != mOverrideUids.end()) {
-            return overrideIter->second.first;
-        }
         // In the absence of the ActivityManager, assume everything to be active.
         if (!mObserverRegistered) return true;
         auto cacheIter = mCachedUids.find(uid);
@@ -1648,20 +1517,6 @@
     checkRegistered();
     {
         audio_utils::lock_guard _l(mMutex);
-        auto overrideIter = mOverrideUids.find(uid);
-        if (overrideIter != mOverrideUids.end()) {
-            if (overrideIter->second.first) {
-                if (overrideIter->second.second != ActivityManager::PROCESS_STATE_UNKNOWN) {
-                    return overrideIter->second.second;
-                } else {
-                    auto cacheIter = mCachedUids.find(uid);
-                    if (cacheIter != mCachedUids.end()) {
-                        return cacheIter->second.second;
-                    }
-                }
-            }
-            return ActivityManager::PROCESS_STATE_UNKNOWN;
-        }
         // In the absence of the ActivityManager, assume everything to be active.
         if (!mObserverRegistered) {
             return ActivityManager::PROCESS_STATE_TOP;
@@ -1714,10 +1569,6 @@
 void AudioPolicyService::UidPolicy::onUidProcAdjChanged(uid_t uid __unused, int32_t adj __unused) {
 }
 
-void AudioPolicyService::UidPolicy::updateOverrideUid(uid_t uid, bool active, bool insert) {
-    updateUid(&mOverrideUids, uid, active, ActivityManager::PROCESS_STATE_UNKNOWN, insert);
-}
-
 void AudioPolicyService::UidPolicy::notifyService() {
     sp<AudioPolicyService> service = mService.promote();
     if (service != nullptr) {
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5adedc6..5297e47 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -311,6 +311,8 @@
     binder::Status clearPreferredMixerAttributes(const media::audio::common::AudioAttributes& attr,
                                                  int32_t portId,
                                                  int32_t uid) override;
+    binder::Status getRegisteredPolicyMixes(
+            std::vector <::android::media::AudioMix>* mixes) override;
 
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
@@ -498,8 +500,6 @@
                 int32_t capability) override;
         void onUidProcAdjChanged(uid_t uid, int32_t adj) override;
 
-        void addOverrideUid(uid_t uid, bool active) { updateOverrideUid(uid, active, true); }
-        void removeOverrideUid(uid_t uid) { updateOverrideUid(uid, false, false); }
 
         void updateUid(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
                        uid_t uid, bool active, int state, bool insert);
@@ -508,7 +508,6 @@
 
      private:
         void notifyService();
-        void updateOverrideUid(uid_t uid, bool active, bool insert);
         void updateUidLocked(std::unordered_map<uid_t, std::pair<bool, int>> *uids,
                              uid_t uid, bool active, int state, bool insert);
         void checkRegistered();
@@ -517,7 +516,6 @@
         audio_utils::mutex mMutex{audio_utils::MutexOrder::kUidPolicy_Mutex};
         ActivityManager mAm;
         bool mObserverRegistered = false;
-        std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids GUARDED_BY(mMutex);
         std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids GUARDED_BY(mMutex);
         std::vector<uid_t> mAssistantUids;
         std::vector<uid_t> mActiveAssistantUids;
@@ -543,6 +541,10 @@
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
 
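+            // No-op override for the state-based variant of the sensor privacy listener callback.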
+            binder::Status onSensorPrivacyStateChanged(int, int, int) {
+                return binder::Status::ok();
+            }
+
         private:
             wp<AudioPolicyService> mService;
             std::atomic_bool mSensorPrivacyEnabled = false;
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index a89a84d..6d8b3cf 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,9 +18,10 @@
 
 #include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
+#include <android_media_audiopolicy.h>
 
 namespace android::media::audiopolicy {
-
+namespace audiopolicy_flags = android::media::audiopolicy;
 using android::AudioPolicyService;
 
 namespace {
@@ -59,8 +60,10 @@
 // static
 sp<OpRecordAudioMonitor>
 OpRecordAudioMonitor::createIfNeeded(
-            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
-            wp<AudioPolicyService::AudioCommandThread> commandThread)
+        const AttributionSourceState &attributionSource,
+        const uint32_t virtualDeviceId,
+        const audio_attributes_t &attr,
+        wp<AudioPolicyService::AudioCommandThread> commandThread)
 {
     if (isAudioServerOrRootUid(attributionSource.uid)) {
         ALOGV("not silencing record for audio or root source %s",
@@ -78,15 +81,19 @@
             || attributionSource.packageName.value().size() == 0) {
         return nullptr;
     }
-    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+
+    return new OpRecordAudioMonitor(attributionSource, virtualDeviceId, attr,
+                                    getOpForSource(attr.source), commandThread);
 }
 
 OpRecordAudioMonitor::OpRecordAudioMonitor(
-        const AttributionSourceState& attributionSource, int32_t appOp,
+        const AttributionSourceState &attributionSource,
+        const uint32_t virtualDeviceId, const audio_attributes_t &attr,
+        int32_t appOp,
         wp<AudioPolicyService::AudioCommandThread> commandThread) :
-            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
-            mCommandThread(commandThread)
-{
+        mHasOp(true), mAttributionSource(attributionSource),
+        mVirtualDeviceId(virtualDeviceId), mAttr(attr), mAppOp(appOp),
+        mCommandThread(commandThread) {
 }
 
 OpRecordAudioMonitor::~OpRecordAudioMonitor()
@@ -131,7 +138,12 @@
     const int32_t mode = mAppOpsManager.checkOp(mAppOp,
             mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
                 mAttributionSource.packageName.value_or(""))));
-    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+    bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+
+    if (audiopolicy_flags::record_audio_device_aware_permission()) {
+        const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
+        hasIt = hasIt && canRecord;
+    }
     // verbose logging: only log when the appOp changed
     ALOGI_IF(hasIt != mHasOp.load(),
             "App op %d missing, %ssilencing record %s",
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
index d3be316..76aff41 100644
--- a/services/audiopolicy/service/AudioRecordClient.h
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -38,12 +38,16 @@
 
     static sp<OpRecordAudioMonitor> createIfNeeded(
             const AttributionSourceState& attributionSource,
+            uint32_t virtualDeviceId,
             const audio_attributes_t& attr,
             wp<AudioPolicyService::AudioCommandThread> commandThread);
 
 private:
-    OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
-            wp<AudioPolicyService::AudioCommandThread> commandThread);
+    OpRecordAudioMonitor(const AttributionSourceState &attributionSource,
+                         uint32_t virtualDeviceId,
+                         const audio_attributes_t &attr,
+                         int32_t appOp,
+                         wp<AudioPolicyService::AudioCommandThread> commandThread);
 
     void onFirstRef() override;
 
@@ -67,6 +71,8 @@
 
     std::atomic_bool mHasOp;
     const AttributionSourceState mAttributionSource;
+    const uint32_t mVirtualDeviceId;
+    const audio_attributes_t mAttr;
     const int32_t mAppOp;
     wp<AudioPolicyService::AudioCommandThread> mCommandThread;
 };
@@ -81,15 +87,20 @@
                       const audio_session_t session, audio_port_handle_t portId,
                       const audio_port_handle_t deviceId,
                       const AttributionSourceState& attributionSource,
+                      const uint32_t virtualDeviceId,
                       bool canCaptureOutput, bool canCaptureHotword,
                       wp<AudioPolicyService::AudioCommandThread> commandThread) :
                 AudioClient(attributes, io, attributionSource,
                     session, portId, deviceId), attributionSource(attributionSource),
+                    virtualDeviceId(virtualDeviceId),
                     startTimeNs(0), canCaptureOutput(canCaptureOutput),
                     canCaptureHotword(canCaptureHotword), silenced(false),
                     mOpRecordAudioMonitor(
                             OpRecordAudioMonitor::createIfNeeded(attributionSource,
-                            attributes, commandThread)) {}
+                                                                 virtualDeviceId,
+                                                                 attributes, commandThread)) {
+
+            }
             ~AudioRecordClient() override = default;
 
     bool hasOp() const {
@@ -97,6 +108,7 @@
     }
 
     const AttributionSourceState attributionSource; // attribution source of client
+    const uint32_t virtualDeviceId; // id of the virtual device associated with the audio device
     nsecs_t startTimeNs;
     const bool canCaptureOutput;
     const bool canCaptureHotword;
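
Note (illustrative, not part of the patch): with the changes above, OpRecordAudioMonitor gates capture on the app op and, when the record_audio_device_aware_permission aconfig flag is enabled, additionally on a device-aware recordingAllowed() check against the client's virtual device id. A minimal sketch of the combined decision, reusing only the helpers already referenced in AudioRecordClient.cpp; the free function itself is hypothetical:

    // Sketch only: mirrors the gating added in OpRecordAudioMonitor::checkOp().
    static bool captureAllowed(const AttributionSourceState& source,
                               uint32_t virtualDeviceId,
                               audio_source_t audioSource,
                               bool appOpAllowed) {
        bool allowed = appOpAllowed;
        if (audiopolicy_flags::record_audio_device_aware_permission()) {
            // RECORD_AUDIO must also be granted for the specific (virtual) device.
            allowed = allowed && recordingAllowed(source, virtualDeviceId, audioSource);
        }
        return allowed;
    }
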
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index b6b9720..c98f8df 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
 #include <audio_utils/fixedfft.h>
 #include <com_android_media_audio.h>
 #include <cutils/bitops.h>
+#include <cutils/properties.h>
 #include <hardware/sensors.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -291,6 +292,7 @@
       mPolicyCallback(callback) {
     ALOGV("%s", __func__);
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    setInheritRt(true);
 }
 
 void Spatializer::onFirstRef() {
@@ -396,7 +398,13 @@
         return status;
     }
     for (const auto channelMask : channelMasks) {
-        if (!audio_is_channel_mask_spatialized(channelMask)) {
+        static const bool stereo_spatialization_enabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+        const bool channel_mask_spatialized =
+                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
+                ? audio_channel_mask_contains_stereo(channelMask)
+                : audio_is_channel_mask_spatialized(channelMask);
+        if (!channel_mask_spatialized) {
             ALOGW("%s: ignoring channelMask:%#x", __func__, channelMask);
             continue;
         }
@@ -1089,7 +1097,7 @@
 }
 
 void Spatializer::checkSensorsState_l() {
-    audio_latency_mode_t requestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
+    mRequestedLatencyMode = AUDIO_LATENCY_MODE_FREE;
     const bool supportsSetLatencyMode = !mSupportedLatencyModes.empty();
     bool supportsLowLatencyMode;
     if (com::android::media::audio::dsa_over_bt_le_audio()) {
@@ -1110,7 +1118,7 @@
                     && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                     && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (supportsLowLatencyMode) {
-                    requestedLatencyMode = selectHeadtrackingConnectionMode_l();
+                    mRequestedLatencyMode = selectHeadtrackingConnectionMode_l();
                 }
                 if (mEngine != nullptr) {
                     setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
@@ -1132,9 +1140,9 @@
     }
     if (mOutput != AUDIO_IO_HANDLE_NONE && supportsSetLatencyMode) {
         const status_t status =
-                AudioSystem::setRequestedLatencyMode(mOutput, requestedLatencyMode);
+                AudioSystem::setRequestedLatencyMode(mOutput, mRequestedLatencyMode);
         ALOGD("%s: setRequestedLatencyMode for output thread(%d) to %s returned %d", __func__,
-              mOutput, toString(requestedLatencyMode).c_str(), status);
+              mOutput, toString(mRequestedLatencyMode).c_str(), status);
     }
 }
 
@@ -1239,6 +1247,10 @@
     base::StringAppendF(&ss, "%sDisplayOrientation: %f\n", prefixSpace.c_str(),
                         mDisplayOrientation);
 
+    // 4. Show flag or property state.
+    base::StringAppendF(&ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
+            com_android_media_audio_stereo_spatialization() ? "true" : "false");
+
     ss.append(prefixSpace + "CommandLog:\n");
     ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
 
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 355df18..c5f159c 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -27,6 +27,7 @@
 #include <audio_utils/SimpleLog.h>
 #include <math.h>
 #include <media/AudioEffect.h>
+#include <media/MediaMetricsItem.h>
 #include <media/audiohal/EffectsFactoryHalInterface.h>
 #include <media/VectorRecorder.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -153,6 +154,34 @@
         return mLevel;
     }
 
+    /** For test only */
+    std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
+            getSupportedHeadtrackingConnectionModes() const {
+        return mSupportedHeadtrackingConnectionModes;
+    }
+
+    /** For test only */
+    media::audio::common::HeadTracking::ConnectionMode getHeadtrackingConnectionMode() const {
+        return mHeadtrackingConnectionMode;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getSupportedLatencyModes() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mSupportedLatencyModes;
+    }
+
+    /** For test only */
+    std::vector<audio_latency_mode_t> getOrderedLowLatencyModes() const {
+        return mOrderedLowLatencyModes;
+    }
+
+    /** For test only */
+    audio_latency_mode_t getRequestedLatencyMode() const {
+        audio_utils::lock_guard lock(mMutex);
+        return mRequestedLatencyMode;
+    }
+
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
@@ -164,6 +193,12 @@
     /** Returns the output stream the spatializer is attached to. */
     audio_io_handle_t getOutput() const { audio_utils::lock_guard lock(mMutex); return mOutput; }
 
+    /** For test only */
+    void setOutput(audio_io_handle_t output) {
+        audio_utils::lock_guard lock(mMutex);
+        mOutput = output;
+    }
+
     void updateActiveTracks(size_t numActiveTracks);
 
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
@@ -188,6 +223,10 @@
     // NO_INIT: Spatializer creation failed.
     static void sendEmptyCreateSpatializerMetricWithStatus(status_t status);
 
+    /** Made public for test only */
+    void onSupportedLatencyModesChangedMsg(
+            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -200,8 +239,6 @@
 
     void onHeadToStagePoseMsg(const std::vector<float>& headToStage);
     void onActualModeChangeMsg(media::HeadTrackingMode mode);
-    void onSupportedLatencyModesChangedMsg(
-            audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
     static constexpr int kMaxEffectParamValues = 10;
     /**
@@ -484,9 +521,11 @@
     std::vector<media::audio::common::Spatialization::Mode> mSpatializationModes;
     std::vector<audio_channel_mask_t> mChannelMasks;
     bool mSupportsHeadTracking;
-    /** List of supported headtracking connection modes reported by the spatializer.
+
+    /** List of supported head tracking connection modes reported by the spatializer.
      * If the list is empty, the spatializer does not support any optional connection
      * mode and mode HeadTracking::ConnectionMode::FRAMEWORK_PROCESSED is assumed.
+     * This is set in the factory constructor and can be accessed without mutex.
      */
     std::unordered_set<media::audio::common::HeadTracking::ConnectionMode>
             mSupportedHeadtrackingConnectionModes;
@@ -504,6 +543,9 @@
     std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
+
+    audio_latency_mode_t mRequestedLatencyMode GUARDED_BY(mMutex) = AUDIO_LATENCY_MODE_FREE;
+
     /** string to latency mode map used to parse bluetooth.core.le.dsa_transport_preference */
     static const std::map<std::string, audio_latency_mode_t> sStringToLatencyModeMap;
     static const std::vector<const char*> sHeadPoseKeys;
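
Note (illustrative, not part of the patch): the Spatializer.cpp change above treats stereo-only channel masks as spatializable only when both the ro.audio.stereo_spatialization_enabled property and the com.android.media.audio stereo_spatialization flag are set; otherwise the original audio_is_channel_mask_spatialized() check applies. A hypothetical helper expressing the same decision as the inline check in the channel-mask loop:

    // Sketch only: same logic as the inline eligibility check added in Spatializer.cpp;
    // the helper itself is not part of the patch.
    static bool isMaskEligibleForSpatialization(audio_channel_mask_t mask,
                                                bool stereoSpatializationProp) {
        if (stereoSpatializationProp && com_android_media_audio_stereo_spatialization()) {
            // When stereo spatialization is enabled, any mask containing stereo qualifies.
            return audio_channel_mask_contains_stereo(mask);
        }
        // Default behaviour: only masks reported as spatialized qualify.
        return audio_is_channel_mask_spatialized(mask);
    }
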
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index 4af66bc..fc349ee 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -33,11 +34,14 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
+        "server_configurable_flags",
     ],
 
     static_libs: [
+        "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "libaudiopolicycomponents",
+        "libflagtest",
         "libgmock",
     ],
 
@@ -104,3 +108,40 @@
     test_suites: ["device-tests"],
 
 }
+
+cc_test {
+    name: "spatializer_tests",
+
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+        "libaudiopolicyservice_dependencies",
+    ],
+
+    require_root: true,
+
+    shared_libs: [
+        "libaudioclient",
+        "libaudiofoundation",
+        "libcutils",
+        "liblog",
+    ],
+
+    static_libs: [
+        "libaudiopolicyservice",
+    ],
+
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudiopolicyservice_headers",
+        "libmediametrics_headers",
+    ],
+
+    srcs: ["spatializer_tests.cpp"],
+
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+}
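
Note (not part of the patch): the android.media.audiopolicy-aconfig-cc and libflagtest dependencies added above are what the flag-gated TEST_F_WITH_FLAGS cases introduced later in audiopolicymanager_tests.cpp rely on. The new spatializer_tests module statically links libaudiopolicyservice so it can exercise the Spatializer class directly; it is declared as a device test and is marked require_root.
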
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index 31ee252..aa7c9cd 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -31,6 +31,7 @@
     using AudioPolicyManager::getConfig;
     using AudioPolicyManager::initialize;
     using AudioPolicyManager::getOutputs;
+    using AudioPolicyManager::getInputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
     using AudioPolicyManager::setSurroundFormatEnabled;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 74d3474..f9d5946 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -28,6 +28,8 @@
 #include <android-base/file.h>
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
+#include <android_media_audiopolicy.h>
+#include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
@@ -43,6 +45,7 @@
 
 using namespace android;
 using testing::UnorderedElementsAre;
+using testing::IsEmpty;
 using android::content::AttributionSourceState;
 
 namespace {
@@ -92,6 +95,12 @@
     return attributionSourceState;
 }
 
+bool equals(const audio_config_base_t& config1, const audio_config_base_t& config2) {
+    return config1.format == config2.format
+            && config1.sample_rate == config2.sample_rate
+            && config1.channel_mask == config2.channel_mask;
+}
+
 } // namespace
 
 TEST(AudioPolicyConfigTest, DefaultConfigForTestsIsEmpty) {
@@ -195,7 +204,8 @@
             audio_channel_mask_t channelMask,
             int sampleRate,
             audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE,
-            audio_port_handle_t *portId = nullptr);
+            audio_port_handle_t *portId = nullptr,
+            uint32_t *virtualDeviceId = nullptr);
     PatchCountCheck snapshotPatchCount() { return PatchCountCheck(mClient.get()); }
 
     void getAudioPorts(audio_port_type_t type, audio_port_role_t role,
@@ -307,7 +317,8 @@
         audio_channel_mask_t channelMask,
         int sampleRate,
         audio_input_flags_t flags,
-        audio_port_handle_t *portId) {
+        audio_port_handle_t *portId,
+        uint32_t *virtualDeviceId) {
     audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
     config.sample_rate = sampleRate;
     config.channel_mask = channelMask;
@@ -315,11 +326,12 @@
     audio_port_handle_t localPortId;
     if (!portId) portId = &localPortId;
     *portId = AUDIO_PORT_HANDLE_NONE;
+    if (!virtualDeviceId) virtualDeviceId = 0;
     AudioPolicyInterface::input_type_t inputType;
     AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     ASSERT_EQ(OK, mManager->getInputForAttr(
             &attr, input, riid, session, attributionSource, &config, flags,
-            selectedDeviceId, &inputType, portId));
+            selectedDeviceId, &inputType, portId, virtualDeviceId));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
 }
 
@@ -1266,6 +1278,54 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
+    const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedChannelMask(deviceChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_attributes_t attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+                               AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    AudioPolicyInterface::input_type_t inputType;
+    audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
+    AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
+    audio_config_base_t requestedConfig = {
+            .channel_mask = AUDIO_CHANNEL_IN_STEREO,
+            .format = AUDIO_FORMAT_PCM_16_BIT,
+            .sample_rate = 48000
+    };
+    audio_config_base_t config = requestedConfig;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    uint32_t *virtualDeviceId = 0;
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
+
+    attr = {AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN,
+            AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_FLAG_NONE, ""};
+    requestedConfig.channel_mask = deviceChannelMask;
+    config = requestedConfig;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    input = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_EQ(OK, mManager->getInputForAttr(
+            &attr, &input, 1 /*riid*/, AUDIO_SESSION_NONE, attributionSource, &config,
+            AUDIO_INPUT_FLAG_NONE,
+            &selectedDeviceId, &inputType, &portId, virtualDeviceId));
+    ASSERT_NE(AUDIO_PORT_HANDLE_NONE, portId);
+    ASSERT_TRUE(equals(requestedConfig, config));
+
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+}
+
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void TearDown() override;
@@ -1273,6 +1333,12 @@
     status_t addPolicyMix(int mixType, int mixFlag, audio_devices_t deviceType,
             std::string mixAddress, const audio_config_t& audioConfig,
             const std::vector<AudioMixMatchCriterion>& matchCriteria);
+
+    status_t addPolicyMix(const AudioMix& mix);
+
+    status_t removePolicyMixes(const Vector<AudioMix>& mixes);
+
+    std::vector<AudioMix> getRegisteredPolicyMixes();
     void clearPolicyMix();
     void addPolicyMixAndStartInputForLoopback(
             int mixType, int mixFlag, audio_devices_t deviceType, std::string mixAddress,
@@ -1307,9 +1373,14 @@
     AudioMix myAudioMix(matchCriteria, mixType, audioConfig, mixFlag,
             String8(mixAddress.c_str()), 0);
     myAudioMix.mDeviceType = deviceType;
+    myAudioMix.mToken = sp<BBinder>::make();
     // Clear mAudioMixes before adding a new one to make sure we don't add already existing mixes.
     mAudioMixes.clear();
-    mAudioMixes.add(myAudioMix);
+    return addPolicyMix(myAudioMix);
+}
+
+status_t AudioPolicyManagerTestDynamicPolicy::addPolicyMix(const AudioMix& mix) {
+    mAudioMixes.add(mix);
 
     // As the policy mixes registration may fail in some cases,
     // the caller needs to check the returned status.
@@ -1317,6 +1388,20 @@
     return ret;
 }
 
+status_t AudioPolicyManagerTestDynamicPolicy::removePolicyMixes(const Vector<AudioMix>& mixes) {
+    status_t ret = mManager->unregisterPolicyMixes(mixes);
+    return ret;
+}
+
+std::vector<AudioMix> AudioPolicyManagerTestDynamicPolicy::getRegisteredPolicyMixes() {
+    std::vector<AudioMix> audioMixes;
+    if (mManager != nullptr) {
+        status_t ret = mManager->getRegisteredPolicyMixes(audioMixes);
+        EXPECT_EQ(NO_ERROR, ret);
+    }
+    return audioMixes;
+}
+
 void AudioPolicyManagerTestDynamicPolicy::clearPolicyMix() {
     if (mManager != nullptr) {
         mManager->stopInput(mLoopbackInputPortId);
@@ -1470,6 +1555,139 @@
     ASSERT_EQ(INVALID_OPERATION, ret);
 }
 
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        RegisterInvalidMixesDoesNotImpactPriorMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    status_t ret = addPolicyMix(validAudioMix);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    ret = addPolicyMix(invalidAudioMix);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(registeredMixes.size(), remainingMixes.size());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        UnregisterInvalidMixesReturnsError,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api),
+                               ACONFIG_FLAG(android::media::audiopolicy, audio_mix_ownership))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> validMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    AudioMix validAudioMix(validMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                           MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    validAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    mAudioMixes.clear();
+    status_t ret = addPolicyMix(validAudioMix);
+
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> registeredMixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(1, registeredMixes.size());
+
+    std::vector<AudioMixMatchCriterion> invalidMixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUidCriterion(/*uid=*/1235, /*exclude=*/true),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+
+    AudioMix invalidAudioMix(invalidMixMatchCriteria, MIX_TYPE_PLAYERS, audioConfig,
+                             MIX_ROUTE_FLAG_LOOP_BACK, String8(mMixAddress.c_str()), 0);
+    invalidAudioMix.mDeviceType = AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+
+    Vector<AudioMix> mixes;
+    mixes.add(invalidAudioMix);
+    mixes.add(validAudioMix);
+    ret = removePolicyMixes(mixes);
+
+    ASSERT_EQ(INVALID_OPERATION, ret);
+
+    std::vector<AudioMix> remainingMixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(remainingMixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestDynamicPolicy,
+        GetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    EXPECT_THAT(mixes, IsEmpty());
+}
+
+TEST_F_WITH_FLAGS(AudioPolicyManagerTestDynamicPolicy,
+        AddPolicyMixAndVerifyGetRegisteredPolicyMixes,
+        REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(android::media::audiopolicy, audio_mix_test_api))
+) {
+    audio_config_t audioConfig = AUDIO_CONFIG_INITIALIZER;
+    audioConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    audioConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+    audioConfig.sample_rate = k48000SamplingRate;
+
+    std::vector<AudioMixMatchCriterion> mixMatchCriteria = {
+            createUidCriterion(/*uid=*/42),
+            createUsageCriterion(AUDIO_USAGE_MEDIA, /*exclude=*/true)};
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_LOOP_BACK,
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX, mMixAddress, audioConfig,
+                                mixMatchCriteria);
+    ASSERT_EQ(NO_ERROR, ret);
+
+    std::vector<AudioMix> mixes = getRegisteredPolicyMixes();
+    ASSERT_EQ(mixes.size(), 1);
+
+    const AudioMix& mix = mixes[0];
+    ASSERT_EQ(mix.mCriteria.size(), mixMatchCriteria.size());
+    for (uint32_t i = 0; i < mixMatchCriteria.size(); i++) {
+        EXPECT_EQ(mix.mCriteria[i].mRule, mixMatchCriteria[i].mRule);
+        EXPECT_EQ(mix.mCriteria[i].mValue.mUsage, mixMatchCriteria[i].mValue.mUsage);
+    }
+    EXPECT_EQ(mix.mDeviceType, AUDIO_DEVICE_OUT_REMOTE_SUBMIX);
+    EXPECT_EQ(mix.mRouteFlags, MIX_ROUTE_FLAG_LOOP_BACK);
+    EXPECT_EQ(mix.mMixType, MIX_TYPE_PLAYERS);
+    EXPECT_EQ(mix.mFormat.channel_mask, audioConfig.channel_mask);
+    EXPECT_EQ(mix.mFormat.format, audioConfig.format);
+    EXPECT_EQ(mix.mFormat.sample_rate, audioConfig.sample_rate);
+    EXPECT_EQ(mix.mFormat.frame_count, audioConfig.frame_count);
+}
+
 class AudioPolicyManagerTestForHdmi
         : public AudioPolicyManagerTestWithConfigurationFile,
           public testing::WithParamInterface<audio_format_t> {
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 3f6a6e3..abf72e0 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_android_media_audio_framework",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 4efdf8a..1a299c6 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -65,6 +65,7 @@
                         samplingRates="48000"
                         channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
                 </mixPort>
+                <mixPort name="hifi_input" role="sink" />
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -111,6 +112,8 @@
                        sources="primary output,hifi_output,mmap_no_irq_out"/>
                 <route type="mix" sink="mixport_bus_input"
                     sources="BUS Device In"/>
+                <route type="mix" sink="hifi_input"
+                        sources="USB Device In" />
             </routes>
         </module>
 
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
new file mode 100644
index 0000000..73bef43
--- /dev/null
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -0,0 +1,231 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Spatializer_Test"
+
+#include "Spatializer.h"
+
+#include <string>
+#include <unordered_set>
+
+#include <gtest/gtest.h>
+
+#include <android/media/audio/common/AudioLatencyMode.h>
+#include <android/media/audio/common/HeadTracking.h>
+#include <android/media/audio/common/Spatialization.h>
+#include <com_android_media_audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+using media::audio::common::HeadTracking;
+using media::audio::common::Spatialization;
+
+class TestSpatializerPolicyCallback :
+        public SpatializerPolicyCallback {
+public:
+    void onCheckSpatializer() override {};
+};
+
+class SpatializerTest : public ::testing::Test {
+protected:
+    void SetUp() override {
+        const sp<EffectsFactoryHalInterface> effectsFactoryHal
+                = EffectsFactoryHalInterface::create();
+        mSpatializer = Spatializer::create(&mTestCallback, effectsFactoryHal);
+        if (mSpatializer == nullptr) {
+            GTEST_SKIP() << "Skipping Spatializer tests: no spatializer";
+        }
+        std::vector<Spatialization::Level> levels;
+        binder::Status status = mSpatializer->getSupportedLevels(&levels);
+        ASSERT_TRUE(status.isOk());
+        for (auto level : levels) {
+            if (level != Spatialization::Level::NONE) {
+                mSpatializer->setLevel(level);
+                break;
+            }
+        }
+        mSpatializer->setOutput(sTestOutput);
+    }
+
+    void TearDown() override {
+        if (mSpatializer == nullptr) {
+            return;
+        }
+        mSpatializer->setLevel(Spatialization::Level::NONE);
+        mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
+        mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
+        mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+        mSpatializer->updateActiveTracks(0);
+    }
+
+    static constexpr audio_io_handle_t sTestOutput = 1977;
+    static constexpr int sTestSensorHandle = 1980;
+
+    const static inline std::vector<audio_latency_mode_t> sA2DPLatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_FREE
+    };
+    const static inline std::vector<audio_latency_mode_t> sBLELatencyModes = {
+        AUDIO_LATENCY_MODE_LOW,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+        AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+        AUDIO_LATENCY_MODE_FREE
+    };
+
+    bool setUpForHeadtracking() {
+        bool htSupported;
+        mSpatializer->isHeadTrackingSupported(&htSupported);
+        if (!htSupported) {
+            return false;
+        }
+
+        std::vector<HeadTracking::Mode> htModes;
+        mSpatializer->getSupportedHeadTrackingModes(&htModes);
+        for (auto htMode : htModes) {
+            if (htMode != HeadTracking::Mode::DISABLED) {
+                mSpatializer->setDesiredHeadTrackingMode(htMode);
+                break;
+            }
+        }
+
+        mSpatializer->setHeadSensor(sTestSensorHandle);
+        return true;
+    }
+
+    TestSpatializerPolicyCallback mTestCallback;
+    sp<Spatializer> mSpatializer;
+};
+
+TEST_F(SpatializerTest, SupportedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedA2dpLatencyTest: head tracking not supported";
+    }
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies == sA2DPLatencyModes);
+    // Free mode must always be the last of the ordered list
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, SupportedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping SupportedBleLatencyTest: DSA over LE not enabled";
+    }
+    std::vector<audio_latency_mode_t> latencies = sBLELatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    std::vector<audio_latency_mode_t> supportedLatencies =
+            mSpatializer->getSupportedLatencyModes();
+
+    ASSERT_TRUE(supportedLatencies.back() == AUDIO_LATENCY_MODE_FREE);
+    ASSERT_TRUE(std::find(supportedLatencies.begin(), supportedLatencies.end(),
+            AUDIO_LATENCY_MODE_LOW) != supportedLatencies.end());
+
+    std::vector<audio_latency_mode_t> orderedLowLatencyModes =
+        mSpatializer->getOrderedLowLatencyModes();
+
+    std::vector<audio_latency_mode_t> supportedLowLatencyModes;
+    // remove free mode at the end of the supported list to only retain low latency modes
+    std::copy(supportedLatencies.begin(),
+              supportedLatencies.begin() + supportedLatencies.size() - 1,
+              std::back_inserter(supportedLowLatencyModes));
+
+    // Verify that supported low latency modes always appear in the ordered low latency
+    // modes list, and in the same order
+    std::vector<audio_latency_mode_t>::iterator lastIt = orderedLowLatencyModes.begin();
+    for (auto latency : supportedLowLatencyModes) {
+        auto it = std::find(orderedLowLatencyModes.begin(), orderedLowLatencyModes.end(), latency);
+        ASSERT_NE(it, orderedLowLatencyModes.end());
+        ASSERT_LE(lastIt, it);
+        lastIt = it;
+    }
+}
+
+TEST_F(SpatializerTest, RequestedA2dpLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedA2dpLatencyTest: head tracking not supported";
+    }
+
+    std::vector<audio_latency_mode_t> latencies = sA2DPLatencyModes;
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput, std::move(latencies));
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low if at least one spatialized track is active
+    mSpatializer->updateActiveTracks(1);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks(0);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
+
+TEST_F(SpatializerTest, RequestedBleLatencyTest) {
+    if (!setUpForHeadtracking()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: head tracking not supported";
+    }
+    if (!com::android::media::audio::dsa_over_bt_le_audio()) {
+        GTEST_SKIP() << "Skipping RequestedBleLatencyTest: DSA over LE not enabled";
+    }
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    // requested latency mode must be free if no spatialized tracks are active
+    audio_latency_mode_t requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+
+    // requested latency mode must be low software if at least one spatialized track is active
+    // and the only supported low latency mode is low software
+    mSpatializer->updateActiveTracks(1);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+
+    mSpatializer->onSupportedLatencyModesChangedMsg(sTestOutput,
+            { AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE,
+              AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE,
+              AUDIO_LATENCY_MODE_FREE });
+
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    HeadTracking::ConnectionMode connectionMode = mSpatializer->getHeadtrackingConnectionMode();
+
+    // If low hardware mode is used, the spatializer must use one of the tunneled sensor
+    // connection modes.
+    // Otherwise, low software mode must be used.
+    if (requestedLatencyMode == AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_HARDWARE) {
+        ASSERT_TRUE(connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_TUNNEL
+                        || connectionMode == HeadTracking::ConnectionMode::DIRECT_TO_SENSOR_SW);
+    } else {
+        ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
+    }
+
+    // requested latency mode must be free after stopping the last spatialized track
+    mSpatializer->updateActiveTracks(0);
+    requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
+    ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
+}
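
Note (not part of the patch): the tests above skip themselves when no spatializer effect is available (Spatializer::create() returning null), when head tracking is not supported, or when the dsa_over_bt_le_audio flag is disabled, so they degrade gracefully on devices without spatial audio. They would typically be run through the device-tests suite declared in Android.bp, for example via atest using the module name spatializer_tests.
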
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 4883a09..daaeae6 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -74,6 +74,7 @@
         "libstagefright_foundation",
         "libxml2",
         "libyuv",
+        "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
@@ -101,7 +102,7 @@
         "android.frameworks.cameraservice.device-V2-ndk",
         "android.hardware.camera.common-V1-ndk",
         "android.hardware.camera.device-V3-ndk",
-        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.metadata-V3-ndk",
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
         "android.hardware.camera.provider@2.6",
@@ -112,6 +113,7 @@
         "libcameraservice_device_independent",
         "libdynamic_depth",
         "libprocessinfoservice_aidl",
+        "libvirtualdevicebuildflags",
         "media_permission-aidl-cpp",
     ],
 }
@@ -185,16 +187,17 @@
         "aidl/AidlCameraServiceListener.cpp",
         "aidl/AidlUtils.cpp",
         "aidl/DeathPipe.cpp",
+        "utils/AttributionAndPermissionUtils.cpp",
         "utils/CameraServiceProxyWrapper.cpp",
-        "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
         "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
-        "utils/SessionStatsBuilder.cpp",
         "utils/TagMonitor.cpp",
         "utils/LatencyHistogram.cpp",
+        "utils/Utils.cpp",
+        "utils/VirtualDeviceCameraIdMapper.cpp",
     ],
 
     header_libs: [
@@ -226,7 +229,6 @@
         "-Werror",
         "-Wno-ignored-qualifiers",
     ],
-
 }
 
 cc_library_static {
@@ -243,6 +245,7 @@
         "device3/ZoomRatioMapper.cpp",
         "utils/ExifUtils.cpp",
         "utils/SessionConfigurationUtilsHost.cpp",
+        "utils/SessionStatsBuilder.cpp",
     ],
 
     header_libs: [
@@ -260,7 +263,7 @@
         "liblog",
         "libutils",
         "libxml2",
-        "camera_platform_flags_c_lib"
+        "camera_platform_flags_c_lib",
     ],
 
     include_dirs: [
@@ -276,5 +279,4 @@
         "-Werror",
         "-Wno-ignored-qualifiers",
     ],
-
 }
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 725f1eb..b34c268 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -38,7 +38,8 @@
 #include <aidl/AidlCameraService.h>
 #include <android-base/macros.h>
 #include <android-base/parseint.h>
-#include <android/permission/PermissionChecker.h>
+#include <android_companion_virtualdevice_flags.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
 #include <binder/IPCThreadState.h>
@@ -73,6 +74,7 @@
 #include <system/camera_metadata.h>
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 #include <system/camera.h>
@@ -82,7 +84,6 @@
 #include "api2/CameraDeviceClient.h"
 #include "utils/CameraTraces.h"
 #include "utils/TagMonitor.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
 #include "utils/SessionConfigurationUtils.h"
 
@@ -92,7 +93,18 @@
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
     const char* kProcessInfoServiceName = "processinfo";
-}; // namespace anonymous
+    const char* kVirtualDeviceBackCameraId = "0";
+    const char* kVirtualDeviceFrontCameraId = "1";
+
+    int32_t getDeviceId(const android::CameraMetadata& cameraInfo) {
+        if (!cameraInfo.exists(ANDROID_INFO_DEVICE_ID)) {
+            return android::kDefaultDeviceId;
+        }
+
+        const auto &deviceIdEntry = cameraInfo.find(ANDROID_INFO_DEVICE_ID);
+        return deviceIdEntry.data.i32[0];
+    }
+} // namespace anonymous
 
 namespace android {
 
@@ -100,6 +112,7 @@
 using namespace camera3::SessionConfigurationUtils;
 
 using binder::Status;
+using companion::virtualnative::IVirtualDeviceManagerNative;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using frameworks::cameraservice::service::implementation::AidlCameraService;
 using hardware::ICamera;
@@ -109,7 +122,9 @@
 using hardware::camera2::ICameraInjectionSession;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
 using hardware::camera2::utils::ConcurrentCameraIdCombination;
+
 namespace flags = com::android::internal::camera::flags;
+namespace vd_flags = android::companion::virtualdevice::flags;
 
 // ----------------------------------------------------------------------------
 // Logging support -- this is for debugging only
@@ -129,18 +144,16 @@
 
 // ----------------------------------------------------------------------------
 
-static const std::string sDumpPermission("android.permission.DUMP");
-static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
-static const std::string sCameraPermission("android.permission.CAMERA");
-static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
-static const std::string sCameraHeadlessSystemUserPermission(
-        "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
-static const std::string
-        sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
-static const std::string sCameraOpenCloseListenerPermission(
-        "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
-static const std::string
-        sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+// Permission strings (references to AttributionAndPermissionUtils for brevity)
+static const std::string &sDumpPermission =
+        AttributionAndPermissionUtils::sDumpPermission;
+static const std::string &sManageCameraPermission =
+        AttributionAndPermissionUtils::sManageCameraPermission;
+static const std::string &sCameraSendSystemEventsPermission =
+        AttributionAndPermissionUtils::sCameraSendSystemEventsPermission;
+static const std::string &sCameraInjectExternalCameraPermission =
+        AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission;
+
 // Constant integer for FGS Logging, used to denote the API type for logger
 static const int LOG_FGS_CAMERA_API = 1;
 const char *sFileName = "lastOpenSessionDumpFile";
@@ -156,7 +169,11 @@
 static std::set<std::string> sServiceErrorEventSet;
 
 CameraService::CameraService(
-        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
+        std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils) :
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils == nullptr ?
+                std::make_shared<AttributionAndPermissionUtils>()
+                : attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper == nullptr ?
                 std::make_shared<CameraServiceProxyWrapper>() : cameraServiceProxyWrapper),
         mEventLog(DEFAULT_EVENT_LOG_LENGTH),
@@ -165,6 +182,7 @@
         mSoundRef(0), mInitialized(false),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
     ALOGI("CameraService started (pid=%d)", getpid());
+    mAttributionAndPermissionUtils->setCameraService(this);
     mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
     mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
     if (mMemFd == -1) {
@@ -172,12 +190,6 @@
     }
 }
 
-// The word 'System' here does not refer to clients only on the system
-// partition. They just need to have a android system uid.
-static bool doesClientHaveSystemUid() {
-    return (CameraThreadState::getCallingUid() < AID_APP_START);
-}
-
 // Enable processes with isolated AID to request the binder
 void CameraService::instantiate() {
     CameraService::publish(true);
@@ -194,7 +206,6 @@
 
 void CameraService::onFirstRef()
 {
-
     ALOGI("CameraService process starting");
 
     BnCameraService::onFirstRef();
@@ -213,7 +224,7 @@
 
     mUidPolicy = new UidPolicy(this);
     mUidPolicy->registerSelf();
-    mSensorPrivacyPolicy = new SensorPrivacyPolicy(this);
+    mSensorPrivacyPolicy = new SensorPrivacyPolicy(this, mAttributionAndPermissionUtils);
     mSensorPrivacyPolicy->registerSelf();
     mInjectionStatusListener = new InjectionStatusListener(this);
 
@@ -266,7 +277,6 @@
             }
         }
 
-
         // Setup vendor tags before we call get_camera_info the first time
         // because HAL might need to setup static vendor keys in get_camera_info
         // TODO: maybe put this into CameraProviderManager::initialize()?
@@ -284,7 +294,6 @@
         deviceIds = mCameraProviderManager->getCameraDeviceIds(&unavailPhysicalIds);
     }
 
-
     for (auto& cameraId : deviceIds) {
         if (getCameraState(cameraId) == nullptr) {
             onDeviceStatusChanged(cameraId, CameraDeviceStatus::PRESENT);
@@ -313,6 +322,10 @@
 
 void CameraService::broadcastTorchModeStatus(const std::string& cameraId, TorchModeStatus status,
         SystemCameraKind systemCameraKind) {
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
         if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
@@ -321,18 +334,29 @@
                     __FUNCTION__, cameraId.c_str());
             continue;
         }
+
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                cameraId);
+                mappedCameraId, deviceId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
-        // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
-        // for the specific package that this listener belongs to.
-        std::vector<std::string> remappedCameraIds =
-                findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
-        for (auto& remappedCameraId : remappedCameraIds) {
-            ret = i->getListener()->onTorchStatusChanged(mapToInterface(status), remappedCameraId);
-            i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
-                    __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
+
+        // Only cameras of the default device can be remapped to a different camera (using
+        // remapCameraIds method), so do the following only if the camera is associated with the
+        // default device.
+        if (deviceId == kDefaultDeviceId) {
+            // For the default device, also trigger the torch callbacks for cameras that were
+            // remapped to the current cameraId for the specific package that this listener belongs
+            // to.
+            std::vector<std::string> remappedCameraIds =
+                    findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
+            for (auto &remappedCameraId: remappedCameraIds) {
+                ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
+                        remappedCameraId, kDefaultDeviceId);
+                i->handleBinderStatus(ret,
+                        "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
+                        __FUNCTION__, i->getListenerUid(), i->getListenerPid(),
+                        ret.exceptionCode());
+            }
         }
     }
 }
@@ -351,10 +375,28 @@
 void CameraService::filterAPI1SystemCameraLocked(
         const std::vector<std::string> &normalDeviceIds) {
     mNormalDeviceIdsWithoutSystemCamera.clear();
-    for (auto &deviceId : normalDeviceIds) {
+    for (auto &cameraId : normalDeviceIds) {
+        if (vd_flags::camera_device_awareness()) {
+            CameraMetadata cameraInfo;
+            status_t res = mCameraProviderManager->getCameraCharacteristics(
+                    cameraId, false, &cameraInfo, false);
+            int32_t deviceId = kDefaultDeviceId;
+            if (res != OK) {
+                ALOGW("%s: Not able to get camera characteristics for camera id %s",
+                      __FUNCTION__, cameraId.c_str());
+            } else {
+                deviceId = getDeviceId(cameraInfo);
+            }
+            // Cameras associated with non-default device ids (i.e., virtual cameras) can never be
+            // system cameras, so skip for non-default device id's.
+            if (deviceId != kDefaultDeviceId) {
+                continue;
+            }
+        }
+
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-        if (getSystemCameraKind(deviceId, &deviceKind) != OK) {
-            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, deviceId.c_str());
+        if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
             continue;
         }
         if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
@@ -362,7 +404,7 @@
             // device ids as per the HAL interface contract.
             break;
         }
-        mNormalDeviceIdsWithoutSystemCamera.push_back(deviceId);
+        mNormalDeviceIdsWithoutSystemCamera.push_back(cameraId);
     }
     ALOGV("%s: number of API1 compatible public cameras is %zu", __FUNCTION__,
               mNormalDeviceIdsWithoutSystemCamera.size());
@@ -516,10 +558,10 @@
     if (newStatus == StatusInternal::NOT_PRESENT) {
         logDeviceRemoved(cameraId, fmt::format("Device status changed from {} to {}",
                 oldStatus, newStatus));
-
         // Set the device status to NOT_PRESENT, clients will no longer be able to connect
         // to this device until the status changes
         updateStatus(StatusInternal::NOT_PRESENT, cameraId);
+        mVirtualDeviceCameraIdMapper.removeCamera(cameraId);
 
         sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
         {
@@ -601,7 +643,7 @@
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
-                    mapToInterface(newStatus), id, physicalId);
+                    mapToInterface(newStatus), id, physicalId, kDefaultDeviceId);
             listener->handleBinderStatus(ret,
                     "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
                     __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -635,7 +677,6 @@
     onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
-
 void CameraService::onTorchStatusChanged(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     Mutex::Autolock al(mTorchStatusMutex);
@@ -644,9 +685,14 @@
 
 void CameraService::broadcastTorchStrengthLevel(const std::string& cameraId,
         int32_t newStrengthLevel) {
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
-        auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
+        auto ret = i->getListener()->onTorchStrengthLevelChanged(mappedCameraId,
+                newStrengthLevel, deviceId);
         i->handleBinderStatus(ret,
                 "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
                 i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
@@ -706,34 +752,7 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool isAutomotiveDevice() {
-    // Checks the property ro.hardware.type and returns true if it is
-    // automotive.
-    char value[PROPERTY_VALUE_MAX] = {0};
-    property_get("ro.hardware.type", value, "");
-    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isHeadlessSystemUserMode() {
-    // Checks if the device is running in headless system user mode
-    // by checking the property ro.fw.mu.headless_system_user.
-    char value[PROPERTY_VALUE_MAX] = {0};
-    property_get("ro.fw.mu.headless_system_user", value, "");
-    return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
-}
-
-static bool isAutomotivePrivilegedClient(int32_t uid) {
-    // Returns false if this is not an automotive device type.
-    if (!isAutomotiveDevice())
-        return false;
-
-    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
-    // privileged client uid used for safety critical use cases such as
-    // rear view and surround view.
-    return uid == AID_AUTOMOTIVE_EVS;
-}
-
-bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const{
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
     // Returns false if this is not an automotive device type.
     if (!isAutomotiveDevice())
         return false;
@@ -778,54 +797,19 @@
     return true;
 }
 
-bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
-        const AttributionSourceState& attributionSource, const std::string& message,
-        int32_t attributedOpCode) const{
-    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
-        // If cameraId is empty, then it means that this check is not used for the
-        // purpose of accessing a specific camera, hence grant permission just
-        // based on uid to the automotive privileged client.
-        if (cameraId.empty())
-            return true;
-        // If this call is used for accessing a specific camera then cam_id must be provided.
-        // In that case, only pre-grants the permission for accessing the exterior system only
-        // camera.
-        return isAutomotiveExteriorSystemCamera(cameraId);
+Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+        int32_t* numCameras) {
+    ATRACE_CALL();
+    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+            && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+        return Status::ok();
     }
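
With device awareness enabled, callers bound to a virtual device with a non-default camera policy get their count straight from the per-device id mapper instead of the HAL-backed lists. A condensed sketch of that dispatch, with a hypothetical countFor() standing in for VirtualDeviceCameraIdMapper::getNumberOfCameras() and the flag check elided:

    #include <cstdint>
    #include <functional>

    // Hypothetical stand-ins for kDefaultDeviceId and DEVICE_POLICY_DEFAULT.
    constexpr int32_t kDefaultDevice = 0;
    constexpr int32_t kDefaultPolicy = 0;

    int32_t numberOfCamerasFor(int32_t deviceId, int32_t devicePolicy,
            const std::function<int32_t(int32_t)>& countFor, int32_t defaultDeviceCount) {
        if (deviceId != kDefaultDevice && devicePolicy != kDefaultPolicy) {
            return countFor(deviceId);   // virtual device: count its own cameras only
        }
        return defaultDeviceCount;       // default device: existing HAL-backed count
    }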
 
-    permission::PermissionChecker permissionChecker;
-    return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
-            toString16(message), attributedOpCode)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-}
-
-bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
-        int callingUid) const{
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = checkPermission(cameraId,
-            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
-    bool checkPermissionForCamera = checkPermission(cameraId,
-            sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
-    return checkPermissionForSystemCamera && checkPermissionForCamera;
-}
-
-bool CameraService::hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
-        int callingPid, int callingUid) const{
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    return checkPermission(cameraId, sCameraHeadlessSystemUserPermission, attributionSource,
-            std::string(), AppOpsManager::OP_NONE);
-}
-
-Status CameraService::getNumberOfCameras(int32_t type, int32_t* numCameras) {
-    ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+            hasPermissionsForSystemCamera(std::string(), getCallingPid(),
+                    getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
             if (hasSystemCameraPermissions) {
@@ -852,8 +836,8 @@
 
 Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
     if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
+        const int pid = getCallingPid();
+        const int uid = getCallingUid();
         ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
                 __FUNCTION__, pid, uid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -869,6 +853,7 @@
 }
 
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
+        int32_t deviceId, int32_t devicePolicy,
         /* out */
         hardware::camera2::impl::CameraMetadataNative* request) {
     ATRACE_CALL();
@@ -883,8 +868,15 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, getCallingUid());
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
 
     binder::Status res;
     if (request == nullptr) {
@@ -906,13 +898,6 @@
                 "request for system only device %s: ", cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "createDefaultRequest");
-    }
-
     CameraMetadata metadata;
     status_t err = mCameraProviderManager->createDefaultRequest(cameraId, tempId, &metadata);
     if (err == OK) {
@@ -930,10 +915,10 @@
 }
 
 Status CameraService::isSessionConfigurationWithParametersSupported(
-        const std::string& unresolvedCameraId,
+        const std::string& unresolvedCameraId, int targetSdkVersion,
         const SessionConfiguration& sessionConfiguration,
-        /*out*/
-        bool* supported) {
+        int32_t deviceId, int32_t devicePolicy,
+        /*out*/ bool* supported) {
     ATRACE_CALL();
 
     if (!flags::feature_combination_query()) {
@@ -946,8 +931,16 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, getCallingUid());
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (supported == nullptr) {
         std::string msg = fmt::sprintf("Camera %s: Invalid 'support' input!",
                 unresolvedCameraId.c_str());
@@ -961,41 +954,216 @@
                 cameraId.c_str());
     }
 
-    // Check for camera permissions
-    if (!hasCameraPermissions()) {
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "android.permission.CAMERA needed to call"
-                "isSessionConfigurationWithParametersSupported");
-    }
+    bool overrideForPerfClass = flags::calculate_perf_override_during_session_support() &&
+                                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+                                        mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
 
+    return isSessionConfigurationWithParametersSupportedUnsafe(cameraId, sessionConfiguration,
+                                                               overrideForPerfClass, supported);
+}
+
+Status CameraService::isSessionConfigurationWithParametersSupportedUnsafe(
+        const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+        bool overrideForPerfClass, /*out*/ bool* supported) {
     *supported = false;
-    status_t ret = mCameraProviderManager->isSessionConfigurationSupported(cameraId.c_str(),
-            sessionConfiguration, /*mOverrideForPerfClass*/false, /*checkSessionParams*/true,
-            supported);
+    status_t ret = mCameraProviderManager->isSessionConfigurationSupported(
+            cameraId, sessionConfiguration, overrideForPerfClass,
+            /*checkSessionParams=*/true, supported);
     binder::Status res;
     switch (ret) {
         case OK:
-            // Expected, do nothing.
+            // Expected, do nothing.
+            return Status::ok();
+        case INVALID_OPERATION: {
+                std::string msg = fmt::sprintf(
+                        "Camera %s: Session configuration with parameters supported query not "
+                        "supported!",
+                        cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+            }
+            break;
+        case NAME_NOT_FOUND: {
+                std::string msg = fmt::sprintf("Camera %s: Unknown camera ID.", cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
+            break;
+        default: {
+                std::string msg = fmt::sprintf(
+                        "Unable to retrieve session configuration support for camera "
+                        "device %s: Error: %s (%d)",
+                        cameraId.c_str(), strerror(-ret), ret);
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                *supported = false;
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
+            break;
+    }
+}
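
The helper above folds the provider manager's status_t results into binder errors: OK passes through, INVALID_OPERATION means the support query itself is unsupported, NAME_NOT_FOUND means an unknown camera id, and anything else is reported as an illegal argument. A condensed sketch of that mapping, using plain enums and approximate status_t values as stand-ins for the binder Status plumbing:

    #include <cerrno>

    enum class QueryError { None, InvalidOperation, IllegalArgument };

    // Simplified stand-ins for the status_t values referenced above.
    constexpr int kOk = 0;
    constexpr int kInvalidOperation = -ENOSYS;
    constexpr int kNameNotFound = -ENOENT;

    QueryError mapSupportQueryResult(int ret) {
        switch (ret) {
            case kOk:               return QueryError::None;
            case kInvalidOperation: return QueryError::InvalidOperation;  // query unsupported
            case kNameNotFound:     return QueryError::IllegalArgument;   // unknown camera id
            default:                return QueryError::IllegalArgument;   // provider failure
        }
    }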
+
+Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
+        int targetSdkVersion, bool overrideToPortrait,
+        const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+        /*out*/ CameraMetadata* outMetadata) {
+    ATRACE_CALL();
+
+    if (outMetadata == nullptr) {
+        std::string msg =
+                fmt::sprintf("Camera %s: Invalid 'outMetadata' input!", unresolvedCameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+
+    if (!mInitialized) {
+        ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
+        return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
+    }
+
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+                                                                  devicePolicy, getCallingUid());
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                                       unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
+    if (shouldRejectSystemCameraConnection(cameraId)) {
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Unable to retrieve camera "
+                                "characteristics for system only device %s: ",
+                                cameraId.c_str());
+    }
+
+    bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+            mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+    if (flags::check_session_support_before_session_char()) {
+        bool sessionConfigSupported;
+        Status res = isSessionConfigurationWithParametersSupportedUnsafe(
+                cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
+        if (!res.isOk()) {
+            // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
+            // report the correct Status to send to the client. Simply forward the error to
+            // the client.
+            outMetadata->clear();
+            return res;
+        }
+        if (!sessionConfigSupported) {
+            std::string msg = fmt::sprintf(
+                    "Session configuration not supported for camera device %s.", cameraId.c_str());
+            outMetadata->clear();
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+    }
+
+    status_t ret = mCameraProviderManager->getSessionCharacteristics(
+            cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+
+    switch (ret) {
+        case OK:
+            // Expected, no handling needed.
             break;
         case INVALID_OPERATION: {
                 std::string msg = fmt::sprintf(
-                        "Camera %s: Session configuration query not supported!",
+                        "Camera %s: Session characteristics query not supported!",
                         cameraId.c_str());
-                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
             }
-
+            break;
+        case NAME_NOT_FOUND: {
+                std::string msg = fmt::sprintf(
+                        "Camera %s: Unknown camera ID.",
+                        cameraId.c_str());
+                ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+                logServiceError(msg, CameraService::ERROR_ILLEGAL_ARGUMENT);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+            }
             break;
         default: {
-                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", cameraId.c_str(),
-                        strerror(-ret), ret);
+                std::string msg = fmt::sprintf(
+                        "Unable to retrieve session characteristics for camera device %s: "
+                        "Error: %s (%d)",
+                        cameraId.c_str(), strerror(-ret), ret);
                 ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.c_str());
+                logServiceError(msg, CameraService::ERROR_INVALID_OPERATION);
+                outMetadata->clear();
+                return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
             }
     }
 
-    return res;
+    return filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+}
+
+Status CameraService::filterSensitiveMetadataIfNeeded(
+        const std::string& cameraId, CameraMetadata* metadata) {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
+
+    if (callingPid == getpid()) {
+        // Caller is cameraserver; no need to remove keys
+        return Status::ok();
+    }
+
+    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+        ALOGE("%s: Couldn't get camera kind for camera id %s", __FUNCTION__, cameraId.c_str());
+        metadata->clear();
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Unable to retrieve camera kind for device %s", cameraId.c_str());
+    }
+    if (deviceKind == SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        // Attempting to query a system-only camera without the system camera permission
+        // would already have failed the shouldRejectSystemCameraConnection check in the
+        // caller, so if we get here for a system-only camera the caller holds the required
+        // permission and there is no need to remove keys.
+        return Status::ok();
+    }
+
+    std::vector<int32_t> tagsRemoved;
+    // Get the device id that owns this camera.
+    auto [cameraOwnerDeviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
+            cameraId);
+    bool hasCameraPermission = hasPermissionsForCamera(cameraId, callingPid, callingUid,
+            cameraOwnerDeviceId);
+    if (hasCameraPermission) {
+        // Caller has camera permission; no need to remove keys
+        return Status::ok();
+    }
+
+    status_t ret = metadata->removePermissionEntries(
+            mCameraProviderManager->getProviderTagIdLocked(cameraId), &tagsRemoved);
+    if (ret != OK) {
+        metadata->clear();
+        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+                                "Failed to remove camera characteristics needing camera permission "
+                                "for device %s:%s (%d)",
+                                cameraId.c_str(), strerror(-ret), ret);
+    }
+
+    if (!tagsRemoved.empty()) {
+        ret = metadata->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
+                                  tagsRemoved.data(), tagsRemoved.size());
+        if (ret != OK) {
+            metadata->clear();
+            return STATUS_ERROR_FMT(
+                    ERROR_INVALID_OPERATION,
+                    "Failed to insert camera keys needing permission for device %s: %s (%d)",
+                    cameraId.c_str(), strerror(-ret), ret);
+        }
+    }
+    return Status::ok();
 }
 
 Status CameraService::parseCameraIdRemapping(
@@ -1003,7 +1171,7 @@
         /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
     std::string packageName;
     std::string cameraIdToReplace, updatedCameraId;
-    for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
+    for (const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
         packageName = packageIdRemapping.packageName;
         if (packageName.empty()) {
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
@@ -1015,7 +1183,7 @@
                     "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
                     packageName.c_str());
         }
-        for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
+        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
             cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
             updatedCameraId = packageIdRemapping.updatedCameraIds[i];
             if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
@@ -1029,6 +1197,22 @@
                         " as updatedCameraId for %s",
                         packageName.c_str());
             }
+
+            // Do not allow any camera remapping that involves a virtual camera.
+            auto [deviceIdForCameraToReplace, _] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
+                            cameraIdToReplace);
+            if (deviceIdForCameraToReplace != kDefaultDeviceId) {
+                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: CameraIdToReplace cannot be a virtual camera");
+            }
+            [[maybe_unused]] auto [deviceIdForUpdatedCamera, unusedMappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(updatedCameraId);
+            if (deviceIdForUpdatedCamera != kDefaultDeviceId) {
+                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                        "CameraIdRemapping: UpdatedCameraId cannot be a virtual camera");
+            }
+
             (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
         }
     }
@@ -1073,7 +1257,7 @@
     mServiceLock.unlock();
 
     // Clear calling identity for disconnect() PID checks.
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
 
     // Disconnect clients.
     for (auto& clientSp : clientsToDisconnect) {
@@ -1085,7 +1269,7 @@
     // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
     clientsToDisconnect.clear();
 
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
     mServiceLock.lock();
 
     {
@@ -1097,17 +1281,24 @@
 }
 
 Status CameraService::injectSessionParams(
-            const std::string& cameraId,
-            const CameraMetadata& sessionParams) {
-   if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
+        const std::string& cameraId,
+        const CameraMetadata& sessionParams) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
+        const int pid = getCallingPid();
+        const int uid = getCallingUid();
         ALOGE("%s: Permission Denial: can't inject session params pid=%d, uid=%d",
                 __FUNCTION__, pid, uid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "Permission Denial: no permission to inject session params");
     }
 
+    // Do not allow session params injection for a virtual camera.
+    auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+    if (deviceId != kDefaultDeviceId) {
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
+                "Cannot inject session params for a virtual camera");
+    }
+
     std::unique_ptr<AutoConditionLock> serviceLockWrapper =
             AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
 
@@ -1173,13 +1364,40 @@
     return inputCameraId;
 }
 
-Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait,
-        CameraInfo* cameraInfo) {
+std::optional<std::string> CameraService::resolveCameraId(
+        const std::string& inputCameraId,
+        int32_t deviceId,
+        int32_t devicePolicy,
+        int clientUid,
+        const std::string& packageName) {
+    if ((deviceId == kDefaultDeviceId)
+            || (devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        auto [storedDeviceId, _] =
+                mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(inputCameraId);
+        if (storedDeviceId != kDefaultDeviceId) {
+            // Trying to access a virtual camera from default-policy device context, we should fail.
+            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                    inputCameraId.c_str(), deviceId);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return std::nullopt;
+        }
+        return resolveCameraId(inputCameraId, clientUid, packageName);
+    }
+
+    return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
+}
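
The new resolveCameraId() overload applies two rules: for the default device (or a default camera policy) an id that belongs to a virtual device is rejected outright and everything else falls back to the existing uid/package-based resolution, while a virtual device with its own camera policy gets the caller-visible id translated through the mapper. A minimal sketch under those assumptions, with a hypothetical IdMapper interface mirroring VirtualDeviceCameraIdMapper:

    #include <cstdint>
    #include <optional>
    #include <string>

    // Hypothetical mapper interface mirroring VirtualDeviceCameraIdMapper.
    struct IdMapper {
        virtual int32_t ownerDeviceId(const std::string& cameraId) const = 0;
        virtual std::optional<std::string> actualCameraId(int32_t deviceId,
                const std::string& mappedId) const = 0;
        virtual ~IdMapper() = default;
    };

    std::optional<std::string> resolveForDevice(const IdMapper& mapper,
            const std::string& inputId, int32_t deviceId, int32_t devicePolicy,
            int32_t defaultDeviceId, int32_t defaultPolicy) {
        if (deviceId == defaultDeviceId || devicePolicy == defaultPolicy) {
            // Default context: ids owned by a virtual device are not visible here.
            if (mapper.ownerDeviceId(inputId) != defaultDeviceId) return std::nullopt;
            return inputId;  // the legacy uid/package-based resolution would run here
        }
        // Virtual device with its own camera policy: translate to the backing camera id.
        return mapper.actualCameraId(deviceId, inputId);
    }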
+
+Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait, int32_t deviceId,
+        int32_t devicePolicy, CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string unresolvedCameraId = cameraIdIntToStrLocked(cameraId);
-    std::string cameraIdStr = resolveCameraId(
-            unresolvedCameraId, CameraThreadState::getCallingUid());
+    std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+    if (cameraIdStr.empty()) {
+        std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+                cameraId, deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
 
     if (shouldRejectSystemCameraConnection(cameraIdStr)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
@@ -1192,7 +1410,7 @@
                 "Camera subsystem is not available");
     }
     bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
-            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
+            getCallingPid(), getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -1217,40 +1435,44 @@
     return ret;
 }
 
-std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt) {
-    const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
-    auto callingPid = CameraThreadState::getCallingPid();
-    auto callingUid = CameraThreadState::getCallingUid();
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
-                sSystemCameraPermission, attributionSource, std::string(),
-                AppOpsManager::OP_NONE);
-    if (checkPermissionForSystemCamera || getpid() == callingPid) {
-        deviceIds = &mNormalDeviceIds;
+std::string CameraService::cameraIdIntToStrLocked(int cameraIdInt,
+        int32_t deviceId, int32_t devicePolicy) {
+    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+            && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
+        std::optional<std::string> cameraIdOptional =
+                mVirtualDeviceCameraIdMapper.getActualCameraId(cameraIdInt, deviceId);
+        return cameraIdOptional.has_value() ? cameraIdOptional.value() : std::string{};
     }
-    if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(deviceIds->size())) {
-        ALOGE("%s: input id %d invalid: valid range  (0, %zu)",
-                __FUNCTION__, cameraIdInt, deviceIds->size());
+
+    const std::vector<std::string> *cameraIds = &mNormalDeviceIdsWithoutSystemCamera;
+    auto callingPid = getCallingPid();
+    auto callingUid = getCallingUid();
+    bool systemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraIdInt),
+            callingPid, callingUid, /* checkCameraPermissions= */ false);
+    if (systemCameraPermissions || getpid() == callingPid) {
+        cameraIds = &mNormalDeviceIds;
+    }
+    if (cameraIdInt < 0 || cameraIdInt >= static_cast<int>(cameraIds->size())) {
+        ALOGE("%s: input id %d invalid: valid range (0, %zu)",
+                __FUNCTION__, cameraIdInt, cameraIds->size());
         return std::string{};
     }
 
-    return (*deviceIds)[cameraIdInt];
+    std::string unresolvedCameraId = (*cameraIds)[cameraIdInt];
+    return resolveCameraId(unresolvedCameraId, getCallingUid());
 }
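
cameraIdIntToStrLocked() now resolves an API1 integer index either through the virtual-device mapper (for a virtual device with its own camera policy) or against one of two lists: callers with system-camera permissions, and cameraserver itself, index into the full list, while everyone else only sees the public list. A simplified sketch of the list-based half:

    #include <string>
    #include <vector>

    std::string cameraIndexToId(int index,
            const std::vector<std::string>& allIds,
            const std::vector<std::string>& publicIds,
            bool callerSeesSystemCameras) {
        const std::vector<std::string>& ids = callerSeesSystemCameras ? allIds : publicIds;
        if (index < 0 || index >= static_cast<int>(ids.size())) {
            return std::string{};  // out of range: empty id signals the error to the caller
        }
        return ids[index];
    }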
 
-std::string CameraService::cameraIdIntToStr(int cameraIdInt) {
+std::string CameraService::cameraIdIntToStr(int cameraIdInt, int32_t deviceId,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
-    return cameraIdIntToStrLocked(cameraIdInt);
+    return cameraIdIntToStrLocked(cameraIdInt, deviceId, devicePolicy);
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
+        int targetSdkVersion, bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
-
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -1263,13 +1485,21 @@
                 "Camera subsystem is not available");;
     }
 
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, getCallingUid());
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
                 "characteristics for system only device %s: ", cameraId.c_str());
     }
 
-    Status ret{};
-
     bool overrideForPerfClass =
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraId, targetSdkVersion);
@@ -1288,63 +1518,31 @@
                     strerror(-res), res);
         }
     }
-    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-    if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
-        return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera kind "
-                "for device %s", cameraId.c_str());
-    }
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
-    std::vector<int32_t> tagsRemoved;
-    // If it's not calling from cameraserver, check the permission only if
-    // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
-    // it would've already been checked in shouldRejectSystemCameraConnection.
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
-            attributionSource, std::string(), AppOpsManager::OP_NONE);
-    if ((callingPid != getpid()) &&
-            (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
-            !checkPermissionForCamera) {
-        res = cameraInfo->removePermissionEntries(
-                mCameraProviderManager->getProviderTagIdLocked(cameraId),
-                &tagsRemoved);
-        if (res != OK) {
-            cameraInfo->clear();
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to remove camera"
-                    " characteristics needing camera permission for device %s: %s (%d)",
-                    cameraId.c_str(), strerror(-res), res);
-        }
-    }
 
-    if (!tagsRemoved.empty()) {
-        res = cameraInfo->update(ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
-                tagsRemoved.data(), tagsRemoved.size());
-        if (res != OK) {
-            cameraInfo->clear();
-            return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to insert camera "
-                    "keys needing permission for device %s: %s (%d)", cameraId.c_str(),
-                    strerror(-res), res);
-        }
-    }
-
-    return ret;
+    return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
 }
 
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t* torchStrength) {
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+        int32_t devicePolicy, int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    const std::string cameraId = resolveCameraId(
-        unresolvedCameraId, CameraThreadState::getCallingUid());
+
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, getCallingUid());
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
     }
 
-    if(torchStrength == NULL) {
+    if (torchStrength == NULL) {
         ALOGE("%s: strength level must not be null.", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Strength level should not be null.");
     }
@@ -1490,7 +1688,7 @@
     if (effectiveApiLevel == API_1) { // Camera1 API route
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
-                packageName, featureId, cameraId,
+                cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
                 api1CameraId, facing, sensorOrientation,
                 clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
                 forceSlowJpegMode);
@@ -1500,7 +1698,8 @@
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
         *client = new CameraDeviceClient(cameraService, tmp,
-                cameraService->mCameraServiceProxyWrapper, packageName, systemNativeClient,
+                cameraService->mCameraServiceProxyWrapper,
+                cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
                 overrideForPerfClass, overrideToPortrait, originalCameraId);
         ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
@@ -1583,7 +1782,7 @@
 }
 
 Status CameraService::initializeShimMetadata(int cameraId) {
-    int uid = CameraThreadState::getCallingUid();
+    int uid = getCallingUid();
 
     std::string cameraIdStr = std::to_string(cameraId);
     Status ret = Status::ok();
@@ -1615,7 +1814,7 @@
 
     std::string unresolvedCameraId = std::to_string(cameraId);
     std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+            getCallingUid());
 
     // Check if we already have parameters
     {
@@ -1634,9 +1833,9 @@
         }
     }
 
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
     ret = initializeShimMetadata(cameraId);
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
     if (!ret.isOk()) {
         // Error already logged by callee
         return ret;
@@ -1664,38 +1863,6 @@
     return STATUS_ERROR(ERROR_INVALID_OPERATION, "Unable to initialize legacy parameters");
 }
 
-// Can camera service trust the caller based on the calling UID?
-static bool isTrustedCallingUid(uid_t uid) {
-    switch (uid) {
-        case AID_MEDIA:        // mediaserver
-        case AID_CAMERASERVER: // cameraserver
-        case AID_RADIO:        // telephony
-            return true;
-        default:
-            return false;
-    }
-}
-
-static status_t getUidForPackage(const std::string &packageName, int userId, /*inout*/uid_t& uid,
-        int err) {
-    PermissionController pc;
-    uid = pc.getPackageUid(toString16(packageName), 0);
-    if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", packageName.c_str());
-        dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
-        return BAD_VALUE;
-    }
-
-    if (userId < 0) {
-        ALOGE("Invalid user: %d", userId);
-        dprintf(err, "Invalid user: %d\n", userId);
-        return BAD_VALUE;
-    }
-
-    uid = multiuser_get_uid(userId, uid);
-    return NO_ERROR;
-}
-
 Status CameraService::validateConnectLocked(const std::string& cameraId,
         const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
         /*out*/int& originalClientPid) const {
@@ -1713,7 +1880,7 @@
     }
 #endif  // __BRILLO__
 
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
 
     if (!mInitialized) {
         ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)",
@@ -1747,10 +1914,8 @@
 Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
         const std::string& clientName, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
-    AttributionSourceState attributionSource{};
-
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
 
     // Check if we can trust clientUid
     if (clientUid == USE_CALLING_UID) {
@@ -1762,7 +1927,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     // Check if we can trust clientPid
@@ -1775,7 +1940,7 @@
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
                 callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientUid, clientPid);
+                clientName.c_str(), clientPid, clientUid);
     }
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
@@ -1789,23 +1954,22 @@
         ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "No camera device with ID \"%s\""
                 "found while trying to query device kind", cameraId.c_str());
-
     }
 
+    // Get the device id that owns this camera.
+    auto [deviceId, _] = mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     // If it's not calling from cameraserver, check the permission if the
     // device isn't a system only camera (shouldRejectSystemCameraConnection already checks for
     // android.permission.SYSTEM_CAMERA for system only camera devices).
-    attributionSource.pid = clientPid;
-    attributionSource.uid = clientUid;
-    attributionSource.packageName = clientName;
-    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
-            std::string(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera =
+            hasPermissionsForCamera(cameraId, clientPid, clientUid, clientName, deviceId);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
     // Make sure the UID is in an active state to use the camera
@@ -1816,7 +1980,7 @@
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
                 "calling UID %d proc state %" PRId32 ")",
-                clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
+                clientName.c_str(), clientPid, clientUid, cameraId.c_str(),
                 callingUid, procState);
     }
 
@@ -1829,7 +1993,7 @@
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
-                "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
     // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
@@ -1841,7 +2005,7 @@
 
     // For non-system clients : Only allow clients who are being used by the current foreground
     // device user, unless calling from our own process.
-    if (!doesClientHaveSystemUid() && callingPid != getpid() &&
+    if (!callerHasSystemUid() && callingPid != getpid() &&
             (mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
         ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
                 "device user %d, currently allowed device users: %s)", callingPid, clientUserId,
@@ -1855,13 +2019,14 @@
         // If the System User tries to access the camera when the device is running in
         // headless system user mode, ensure that client has the required permission
         // CAMERA_HEADLESS_SYSTEM_USER.
-        if (isHeadlessSystemUserMode() && (clientUserId == USER_SYSTEM) &&
-                !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
+        if (isHeadlessSystemUserMode()
+                && (clientUserId == USER_SYSTEM)
+                && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
             ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                     "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
                     User without camera headless system user permission",
-                    clientName.c_str(), clientUid, clientPid, cameraId.c_str());
+                    clientName.c_str(), clientPid, clientUid, cameraId.c_str());
         }
     }
 
@@ -1870,7 +2035,7 @@
 
 status_t CameraService::checkIfDeviceIsUsable(const std::string& cameraId) const {
     auto cameraState = getCameraState(cameraId);
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (cameraState == nullptr) {
         ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid,
                 cameraId.c_str());
@@ -1968,7 +2133,7 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
-        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+        if (!binder && isAutomotivePrivilegedClient(getCallingUid())) {
             // If processinfo service is not available and the client is automotive privileged
             // client used for safety critical uses cases such as rear-view and surround-view which
             // needs to be available before android boot completes, then use the hardcoded values
@@ -2101,7 +2266,7 @@
     mServiceLock.unlock();
 
     // Clear caller identity temporarily so client disconnect PID checks work correctly
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
 
     // Destroy evicted clients
     for (auto& i : evictedClients) {
@@ -2109,7 +2274,7 @@
         i->getValue()->disconnect(); // Clients will remove themselves from the active client list
     }
 
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
 
     for (const auto& i : evictedClients) {
         ALOGV("%s: Waiting for disconnect to complete for client for device %s (PID %" PRId32 ")",
@@ -2152,15 +2317,20 @@
         int targetSdkVersion,
         bool overrideToPortrait,
         bool forceSlowJpegMode,
+        int32_t deviceId,
+        int32_t devicePolicy,
         /*out*/
         sp<ICamera>* device) {
-
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string unresolvedCameraId = cameraIdIntToStr(api1CameraId);
-    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+    std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+    if (cameraIdStr.empty()) {
+        std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
+                api1CameraId, deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
 
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
@@ -2168,9 +2338,8 @@
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
             overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
-    if(!ret.isOk()) {
-        logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
-                toStdString(ret.toString8()));
+    if (!ret.isOk()) {
+        logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2180,8 +2349,8 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            getCallingUid(),
+            getCallingPid());
     }
 
     return ret;
@@ -2211,9 +2380,9 @@
     //    and the serving thread is a non hwbinder thread, the client must have
     //    android.permission.SYSTEM_CAMERA permissions to connect.
 
-    int cPid = CameraThreadState::getCallingPid();
-    int cUid = CameraThreadState::getCallingUid();
-    bool systemClient = doesClientHaveSystemUid();
+    int cPid = getCallingPid();
+    int cUid = getCallingUid();
+    bool systemClient = callerHasSystemUid();
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &systemCameraKind) != OK) {
         // This isn't a known camera ID, so it's not a system camera
@@ -2222,7 +2391,7 @@
     }
 
     // (1) Cameraserver trying to connect, accept.
-    if (CameraThreadState::getCallingPid() == getpid()) {
+    if (isCallerCameraServerNotDelegating()) {
         return false;
     }
     // (2)
@@ -2250,26 +2419,31 @@
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait,
+        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
-
     ATRACE_CALL();
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
     std::string clientPackageNameAdj = clientPackageName;
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
     bool systemNativeClient = false;
-    if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
-        std::string systemClient =
-                fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
+    if (callerHasSystemUid() && (clientPackageNameAdj.size() == 0)) {
+        std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
         clientPackageNameAdj = systemClient;
         systemNativeClient = true;
     }
-    const std::string cameraId = resolveCameraId(
-            unresolvedCameraId,
-            CameraThreadState::getCallingUid(),
-            clientPackageNameAdj);
+
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, callingUid, clientPackageNameAdj);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
 
     if (oomScoreOffset < 0) {
         std::string msg =
@@ -2281,7 +2455,6 @@
     }
 
     userid_t clientUserId = multiuser_get_user_id(clientUid);
-    int callingUid = CameraThreadState::getCallingUid();
     if (clientUid == USE_CALLING_UID) {
         clientUserId = multiuser_get_user_id(callingUid);
     }
@@ -2298,8 +2471,8 @@
     // enforce system camera permissions
     if (oomScoreOffset > 0
             && !hasPermissionsForSystemCamera(cameraId, callingPid,
-                    CameraThreadState::getCallingUid())
-            && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+                    callingUid)
+            && !isTrustedCallingUid(callingUid)) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
                         clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
@@ -2313,7 +2486,7 @@
             targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
             /*out*/client);
 
-    if(!ret.isOk()) {
+    if (!ret.isOk()) {
         logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
         return ret;
     }
@@ -2339,25 +2512,69 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiBegin(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            callingUid,
+            callingPid);
     }
     return ret;
 }
 
+bool CameraService::isCameraPrivacyEnabled(const String16& packageName, const std::string& cam_id,
+        int callingPid, int callingUid) {
+    if (!isAutomotiveDevice()) {
+        return mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+    }
+
+    // The automotive privileged client AID_AUTOMOTIVE_EVS uses exterior system cameras for
+    // safety-critical use cases; it cannot be disabled and is exempt from the camera
+    // privacy policy.
+    if ((isAutomotivePrivilegedClient(callingUid) && isAutomotiveExteriorSystemCamera(cam_id))) {
+        ALOGI("Camera privacy cannot be enabled for automotive privileged client %d "
+                "using camera %s", callingUid, cam_id.c_str());
+        return false;
+    }
+
+    if (mSensorPrivacyPolicy->isCameraPrivacyEnabled(packageName)) {
+        return true;
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState() == SensorPrivacyManager::DISABLED) {
+        return false;
+    } else if (mSensorPrivacyPolicy->getCameraPrivacyState()
+            == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+        if (hasPermissionsForCameraPrivacyAllowlist(callingPid, callingUid)) {
+            return false;
+        } else {
+            return true;
+        }
+    }
+    return false;
+}
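
On non-automotive devices the check stays a plain global toggle; on automotive devices it becomes a small decision ladder: privileged EVS clients on exterior system cameras are exempt, a per-package privacy setting wins next, and otherwise the global state decides, with allowlisted callers exempt from ENABLED_EXCEPT_ALLOWLISTED_APPS. A condensed sketch of that ladder with the inputs flattened into booleans (an approximation of the calls above, not the exact API):

    enum class PrivacyState { Disabled, Enabled, EnabledExceptAllowlisted };

    bool cameraPrivacyBlocksClient(bool automotive, bool privilegedEvsOnExteriorCamera,
            bool perPackagePrivacyEnabled, PrivacyState state, bool callerOnAllowlist,
            bool globalPrivacyEnabled) {
        if (!automotive) return globalPrivacyEnabled;      // non-automotive: global toggle only
        if (privilegedEvsOnExteriorCamera) return false;   // safety-critical EVS exemption
        if (perPackagePrivacyEnabled) return true;         // per-package setting wins
        if (state == PrivacyState::Disabled) return false;
        if (state == PrivacyState::EnabledExceptAllowlisted) return !callerOnAllowlist;
        return false;
    }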
+
 std::string CameraService::getPackageNameFromUid(int clientUid) {
     std::string packageName("");
 
-    sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
-    if (binder == 0) {
-        ALOGE("Cannot get permission service");
+    sp<IPermissionController> permCtrl;
+    if (flags::cache_permission_services()) {
+        permCtrl = getPermissionController();
+    } else {
+        sp<IServiceManager> sm = defaultServiceManager();
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+        // Using deprecated function to preserve functionality until the
+        // cache_permission_services flag is removed.
+        sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
+#pragma clang diagnostic pop
+        if (binder == 0) {
+            ALOGE("Cannot get permission service");
+            permCtrl = nullptr;
+        } else {
+            permCtrl = interface_cast<IPermissionController>(binder);
+        }
+    }
+
+    if (permCtrl == nullptr) {
         // Return empty package name and the further interaction
         // with camera will likely fail
         return packageName;
     }
 
-    sp<IPermissionController> permCtrl = interface_cast<IPermissionController>(binder);
     Vector<String16> packages;
 
     permCtrl->getPackagesForUid(clientUid, packages);
@@ -2387,7 +2604,7 @@
     bool isNonSystemNdk = false;
     std::string clientPackageName;
     int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
+            getCallingUid() : clientUid;
     if (clientPackageNameMaybe.size() <= 0) {
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -2403,7 +2620,7 @@
     int originalClientPid = 0;
 
     int packagePid = (clientPid == USE_CALLING_PID) ?
-        CameraThreadState::getCallingPid() : clientPid;
+        getCallingPid() : clientPid;
     ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
             static_cast<int>(effectiveApiLevel));
@@ -2428,7 +2645,7 @@
         }
 
         // Enforce client permissions and do basic validity checks
-        if(!(ret = validateConnectLocked(cameraId, clientPackageName,
+        if (!(ret = validateConnectLocked(cameraId, clientPackageName,
                 /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
             return ret;
         }
@@ -2617,38 +2834,39 @@
             }
         }
 
-        // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use
-        // cases such as rear view and surround view cannot be disabled and are exempt from camera
-        // privacy policy.
-        if ((!isAutomotivePrivilegedClient(packageUid) ||
-                !isAutomotiveExteriorSystemCamera(cameraId))) {
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
             // Set camera muting behavior.
-            bool isCameraPrivacyEnabled =
+            isCameraPrivacyEnabled = this->isCameraPrivacyEnabled(
+                    toString16(client->getPackageName()), cameraId, packagePid, packageUid);
+        } else {
+            isCameraPrivacyEnabled =
                     mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-            if (client->supportsCameraMute()) {
-                client->setCameraMute(
-                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-            } else if (isCameraPrivacyEnabled) {
-                // no camera mute supported, but privacy is on! => disconnect
-                ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                        client->getPackageName().c_str(), cameraId.c_str());
-                // Do not hold mServiceLock while disconnecting clients, but
-                // retain the condition blocking other clients from connecting
-                // in mServiceLockWrapper if held.
-                mServiceLock.unlock();
-                // Clear caller identity temporarily so client disconnect PID
-                // checks work correctly
-                int64_t token = CameraThreadState::clearCallingIdentity();
-                // Note AppOp to trigger the "Unblock" dialog
-                client->noteAppOp();
-                client->disconnect();
-                CameraThreadState::restoreCallingIdentity(token);
-                // Reacquire mServiceLock
-                mServiceLock.lock();
+        }
 
-                return STATUS_ERROR_FMT(ERROR_DISABLED,
-                        "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
-            }
+        if (client->supportsCameraMute()) {
+            client->setCameraMute(
+                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+        } else if (isCameraPrivacyEnabled) {
+            // no camera mute supported, but privacy is on! => disconnect
+            ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                    client->getPackageName().c_str(), cameraId.c_str());
+            // Do not hold mServiceLock while disconnecting clients, but
+            // retain the condition blocking other clients from connecting
+            // in mServiceLockWrapper if held.
+            mServiceLock.unlock();
+            // Clear caller identity temporarily so client disconnect PID
+            // checks work correctly
+            int64_t token = clearCallingIdentity();
+            // Note AppOp to trigger the "Unblock" dialog
+            client->noteAppOp();
+            client->disconnect();
+            restoreCallingIdentity(token);
+            // Reacquire mServiceLock
+            mServiceLock.lock();
+
+            return STATUS_ERROR_FMT(ERROR_DISABLED,
+                    "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
         }
 
         if (shimUpdateOnly) {
@@ -2785,7 +3003,8 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2795,8 +3014,17 @@
                 "Torch client binder in null.");
     }
 
-    int uid = CameraThreadState::getCallingUid();
-    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+    int uid = getCallingUid();
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, uid);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
+
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level "
                 "for system only device %s: ", cameraId.c_str());
@@ -2903,7 +3131,7 @@
         clientBinder->linkToDeath(this);
     }
 
-    int clientPid = CameraThreadState::getCallingPid();
+    int clientPid = getCallingPid();
     ALOGI("%s: Torch strength for camera id %s changed to %d for client PID %d",
             __FUNCTION__, cameraId.c_str(), torchStrength, clientPid);
     if (!shouldSkipTorchStrengthUpdates) {
@@ -2913,7 +3141,7 @@
 }
 
 Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
-        const sp<IBinder>& clientBinder) {
+        const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2923,8 +3151,16 @@
                 "Torch client Binder is null");
     }
 
-    int uid = CameraThreadState::getCallingUid();
-    const std::string cameraId = resolveCameraId(unresolvedCameraId, uid);
+    int uid = getCallingUid();
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
+            devicePolicy, uid);
+    if (!cameraIdOptional.has_value()) {
+        std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                unresolvedCameraId.c_str(), deviceId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+    }
+    std::string cameraId = cameraIdOptional.value();
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
@@ -3031,7 +3267,7 @@
         }
     }
 
-    int clientPid = CameraThreadState::getCallingPid();
+    int clientPid = getCallingPid();
     std::string torchState = enabled ? "on" : "off";
     ALOGI("Torch for camera id %s turned %s for client PID %d", cameraId.c_str(),
             torchState.c_str(), clientPid);
@@ -3051,7 +3287,7 @@
 
 Status CameraService::notifySystemEvent(int32_t eventId,
         const std::vector<int32_t>& args) {
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3059,7 +3295,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about system"
                     " events from pid=%d, uid=%d", pid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3125,7 +3361,7 @@
 }
 
 Status CameraService::notifyDeviceStateChange(int64_t newState) {
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3133,7 +3369,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about device"
                     " state changes from pid=%d, uid=%d", pid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3156,7 +3392,7 @@
 
 Status CameraService::notifyDisplayConfigurationChange() {
     ATRACE_CALL();
-    const int callingPid = CameraThreadState::getCallingPid();
+    const int callingPid = getCallingPid();
     const int selfPid = getpid();
 
     // Permission checks
@@ -3164,7 +3400,7 @@
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
         if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about orientation"
                     " changes from pid=%d, uid=%d", callingPid, uid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
@@ -3195,6 +3431,7 @@
     return Status::ok();
 }
 
+// TODO(b/291736219): This is to be made device-aware.
 Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
@@ -3238,22 +3475,6 @@
     return Status::ok();
 }
 
-bool CameraService::hasCameraPermissions() const {
-    int callingPid = CameraThreadState::getCallingPid();
-    int callingUid = CameraThreadState::getCallingUid();
-    AttributionSourceState attributionSource{};
-    attributionSource.pid = callingPid;
-    attributionSource.uid = callingUid;
-    bool res = checkPermission(std::string(), sCameraPermission,
-            attributionSource, std::string(), AppOpsManager::OP_NONE);
-
-    bool hasPermission = ((callingPid == getpid()) || res);
-    if (!hasPermission) {
-        ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
-    }
-    return hasPermission;
-}
-
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
         int targetSdkVersion, /*out*/bool* isSupported) {
@@ -3269,7 +3490,12 @@
     }
 
     // Check for camera permissions
-    if (!hasCameraPermissions()) {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
+    // TODO(b/291736219): Pass deviceId owning the camera if we make this method device-aware.
+    bool hasCameraPermission = ((callingPid == getpid()) ||
+            hasPermissionsForCamera(callingPid, callingUid, kDefaultDeviceId));
+    if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
                 "isConcurrentSessionConfigurationSupported");
@@ -3303,7 +3529,6 @@
         /*out*/
         std::vector<hardware::CameraStatus> *cameraStatuses,
         bool isVendorListener, bool isProcessLocalTest) {
-
     ATRACE_CALL();
 
     ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
@@ -3313,15 +3538,9 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Null listener given to addListener");
     }
 
-    auto clientUid = CameraThreadState::getCallingUid();
-    auto clientPid = CameraThreadState::getCallingPid();
-    AttributionSourceState attributionSource{};
-    attributionSource.uid = clientUid;
-    attributionSource.pid = clientPid;
-
-   bool openCloseCallbackAllowed = checkPermission(std::string(),
-            sCameraOpenCloseListenerPermission, attributionSource, std::string(),
-            AppOpsManager::OP_NONE);
+    auto clientPid = getCallingPid();
+    auto clientUid = getCallingUid();
+    bool openCloseCallbackAllowed = hasPermissionsForOpenCloseListener(clientPid, clientUid);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -3357,9 +3576,14 @@
     {
         Mutex::Autolock lock(mCameraStatesLock);
         for (auto& i : mCameraStates) {
-            cameraStatuses->emplace_back(i.first,
+            // Get the device id and app-visible camera id for the given HAL-visible camera id.
+            auto [deviceId, mappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(i.first);
+
+            cameraStatuses->emplace_back(mappedCameraId,
                     mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds(),
-                    openCloseCallbackAllowed ? i.second->getClientPackage() : std::string());
+                    openCloseCallbackAllowed ? i.second->getClientPackage() : std::string(),
+                    deviceId);
         }
     }
     // Remove the camera statuses that should be hidden from the client, we do
@@ -3368,19 +3592,38 @@
     // the same time.
     cameraStatuses->erase(std::remove_if(cameraStatuses->begin(), cameraStatuses->end(),
                 [this, &isVendorListener, &clientPid, &clientUid](const hardware::CameraStatus& s) {
-                    SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-                    if (getSystemCameraKind(s.cameraId, &deviceKind) != OK) {
-                        ALOGE("%s: Invalid camera id %s, skipping status update",
-                                __FUNCTION__, s.cameraId.c_str());
-                        return true;
-                    }
-                    return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
-                            clientUid);}), cameraStatuses->end());
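+                        // Resolve the camera id for the listener's device context and drop
+                        // entries that have no valid mapping.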
+                        std::string cameraId = s.cameraId;
+                        std::optional<std::string> cameraIdOptional = resolveCameraId(s.cameraId,
+                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM,
+                                clientUid);
+                        if (!cameraIdOptional.has_value()) {
+                            std::string msg =
+                                    fmt::sprintf(
+                                            "Camera %s: Invalid camera id for device id %d",
+                                            s.cameraId.c_str(), s.deviceId);
+                            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                            return true;
+                        }
+                        cameraId = cameraIdOptional.value();
+                        SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
+                        if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+                            ALOGE("%s: Invalid camera id %s, skipping status update",
+                                    __FUNCTION__, s.cameraId.c_str());
+                            return true;
+                        }
+                        return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
+                                clientUid);
+                     }), cameraStatuses->end());
 
-    //cameraStatuses will have non-eligible camera ids removed.
+    // cameraStatuses will have non-eligible camera ids removed.
     std::set<std::string> idsChosenForCallback;
     for (const auto &s : *cameraStatuses) {
-        idsChosenForCallback.insert(s.cameraId);
+        // Add only default device cameras here, as virtual cameras currently don't support torch
+        // anyway. Note that this is a simplification of the implementation here, and we should
+        // change this when virtual cameras support torch.
+        if (s.deviceId == kDefaultDeviceId) {
+            idsChosenForCallback.insert(s.cameraId);
+        }
     }
 
     /*
@@ -3394,7 +3637,8 @@
             // The camera id is visible to the client. Fine to send torch
             // callback.
             if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
-                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+                listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id,
+                        kDefaultDeviceId);
             }
         }
     }
@@ -3462,7 +3706,7 @@
     ATRACE_CALL();
 
     const std::string cameraId = resolveCameraId(
-            unresolvedCameraId, CameraThreadState::getCallingUid());
+            unresolvedCameraId, getCallingUid());
 
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
@@ -3527,7 +3771,7 @@
     ATRACE_CALL();
 
     const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            CameraThreadState::getCallingUid());
+            getCallingUid());
 
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
@@ -3544,11 +3788,25 @@
     ATRACE_CALL();
 
     if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = CameraThreadState::getCallingPid();
-        const int uid = CameraThreadState::getCallingUid();
+        const int pid = getCallingPid();
+        const int uid = getCallingUid();
         ALOGE("Permission Denial: can't inject camera pid=%d, uid=%d", pid, uid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                        "Permission Denial: no permission to inject camera");
+                "Permission Denial: no permission to inject camera");
+    }
+
+    // Do not allow any camera injection that injects or replaces a virtual camera.
+    auto [deviceIdForInternalCamera, _] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(internalCamId);
+    if (deviceIdForInternalCamera != kDefaultDeviceId) {
+        return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+                "Cannot replace a virtual camera");
+    }
+    [[maybe_unused]] auto [deviceIdForExternalCamera, unusedMappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(externalCamId);
+    if (deviceIdForExternalCamera != kDefaultDeviceId) {
+        return STATUS_ERROR(ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED,
+                "Cannot inject a virtual camera to replace an internal camera");
     }
 
     ALOGV(
@@ -3578,7 +3836,7 @@
                         mInjectionExternalCamId.c_str());
             }
             res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
-            if(res != OK) {
+            if (res != OK) {
                 mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
             }
         } else {
@@ -3615,7 +3873,6 @@
         std::unique_ptr<AutoConditionLock> lock =
                 AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
 
-
         std::vector<sp<BasicClient>> evicted;
         for (auto& i : mActiveClientManager.getAll()) {
             auto clientSp = i->getValue();
@@ -3747,13 +4004,13 @@
     mServiceLock.unlock();
 
     // Clear caller identity temporarily so client disconnect PID checks work correctly
-    int64_t token = CameraThreadState::clearCallingIdentity();
+    int64_t token = clearCallingIdentity();
 
     for (auto& i : evicted) {
         i->disconnect();
     }
 
-    CameraThreadState::restoreCallingIdentity(token);
+    restoreCallingIdentity(token);
 
     // Reacquire mServiceLock
     mServiceLock.lock();
@@ -3884,7 +4141,6 @@
 // We share the media players for shutter and recording sound for all clients.
 // A reference count is kept to determine when we will actually release the
 // media players.
-
 sp<MediaPlayer> CameraService::newMediaPlayer(const char *file) {
     sp<MediaPlayer> mp = new MediaPlayer();
     status_t error;
@@ -3964,6 +4220,7 @@
 
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName, bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
         const std::string& cameraIdStr,
@@ -3972,13 +4229,14 @@
         int servicePid, bool overrideToPortrait) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
+                attributionAndPermissionUtils,
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
                 servicePid, overrideToPortrait),
         mCameraId(api1CameraId)
 {
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
 
     mRemoteCallback = cameraClient;
@@ -4002,10 +4260,12 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName, bool nativeClient,
         const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, bool overrideToPortrait):
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
         mClientPackageName(clientPackageName), mSystemNativeClient(nativeClient),
@@ -4075,8 +4335,8 @@
     const auto& mActivityManager = getActivityManager();
     if (mActivityManager) {
         mActivityManager->logFgsApiEnd(LOG_FGS_CAMERA_API,
-            CameraThreadState::getCallingUid(),
-            CameraThreadState::getCallingPid());
+            getCallingUid(),
+            getCallingPid());
     }
 
     return res;
@@ -4086,7 +4346,7 @@
     // No dumping of clients directly over Binder,
     // must go through CameraService::dump
     android_errorWriteWithInfoLog(SN_EVENT_LOG_ID, "26265403",
-            CameraThreadState::getCallingUid(), NULL, 0);
+            getCallingUid(), NULL, 0);
     return OK;
 }
 
@@ -4169,8 +4429,15 @@
         // return MODE_IGNORED. Do not treat such case as error.
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid,
                 mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        bool isCameraPrivacyEnabled;
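+        // With the camera privacy allowlist flag enabled, privacy is evaluated per
+        // package; otherwise the global camera privacy toggle is used.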
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+        }
         // We don't want to return EACCESS if the CameraPrivacy is enabled.
         // We prefer to successfully open the camera and perform camera muting
         // or blocking in connectHelper as handleAppOpMode can be called before the
@@ -4196,8 +4463,15 @@
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
-        mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+
+        if (flags::watch_foreground_changes()) {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+                toString16(mClientPackageName),
+                AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
+        } else {
+            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
                 toString16(mClientPackageName), mOpsCallback);
+        }
 
         // Just check for camera access here on open - delay startOp until
         // camera frames start streaming in startCameraStreamingOps
@@ -4357,20 +4631,42 @@
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
-        bool isCameraPrivacyEnabled =
+
+        // Uid may be active, but not visible to the user (e.g. PROCESS_STATE_FOREGROUND_SERVICE).
+        // If not visible, but still active, then we want to block instead of muting the camera.
+        int32_t procState = sCameraService->mUidPolicy->getProcState(mClientUid);
+        bool isUidVisible = (procState <= ActivityManager::PROCESS_STATE_BOUND_TOP);
+
+        bool isCameraPrivacyEnabled;
+        if (flags::camera_privacy_allowlist()) {
+            isCameraPrivacyEnabled = sCameraService->isCameraPrivacyEnabled(
+                    toString16(mClientPackageName), std::string(), mClientPid, mClientUid);
+        } else {
+            isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
-                mCameraIdStr.c_str(), mClientPackageName.c_str(),
-                mUidIsTrusted, isUidActive);
-        // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
-        // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
-        // error.
+        }
+
+        ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d"
+                " isUidVisible %d, isCameraPrivacyEnabled %d", mCameraIdStr.c_str(),
+                mClientPackageName.c_str(), mUidIsTrusted, isUidActive, isUidVisible,
+                isCameraPrivacyEnabled);
+        // If the calling Uid is trusted (a native service), or the client Uid is active / visible
+        // (WAR for b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such
+        // cases as errors.
         if (!mUidIsTrusted) {
-            if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
-                setCameraMute(true);
-            } else if (!isUidActive
-                || (isCameraPrivacyEnabled && !supportsCameraMute())) {
-                block();
+            if (flags::watch_foreground_changes()) {
+                if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else {
+                    block();
+                }
+            } else {
+                if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
+                    setCameraMute(true);
+                } else if (!isUidActive
+                    || (isCameraPrivacyEnabled && !supportsCameraMute())) {
+                    block();
+                }
             }
         }
     } else if (res == AppOpsManager::MODE_ALLOWED) {
@@ -4383,7 +4679,7 @@
 
     // Reset the client PID to allow server-initiated disconnect,
     // and to prevent further calls by client.
-    mClientPid = CameraThreadState::getCallingPid();
+    mClientPid = getCallingPid();
     CaptureResultExtras resultExtras; // a dummy result (invalid)
     notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISABLED, resultExtras);
     disconnect();
@@ -4741,7 +5037,15 @@
     }
     hasCameraPrivacyFeature(); // Called so the result is cached
     mSpm.addSensorPrivacyListener(this);
+    if (isAutomotiveDevice()) {
+        mSpm.addToggleSensorPrivacyListener(this);
+    }
     mSensorPrivacyEnabled = mSpm.isSensorPrivacyEnabled();
+    if (flags::camera_privacy_allowlist()) {
+        mCameraPrivacyState = mSpm.getToggleSensorPrivacyState(
+                SensorPrivacyManager::TOGGLE_TYPE_SOFTWARE,
+                SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
+    }
     status_t res = mSpm.linkToDeath(this);
     if (res == OK) {
         mRegistered = true;
@@ -4773,6 +5077,9 @@
 void CameraService::SensorPrivacyPolicy::unregisterSelf() {
     Mutex::Autolock _l(mSensorPrivacyLock);
     mSpm.removeSensorPrivacyListener(this);
+    if (isAutomotiveDevice()) {
+        mSpm.removeToggleSensorPrivacyListener(this);
+    }
     mSpm.unlinkToDeath(this);
     mRegistered = false;
     ALOGV("SensorPrivacyPolicy: Unregistered with SensorPrivacyManager");
@@ -4787,6 +5094,15 @@
     return mSensorPrivacyEnabled;
 }
 
+int CameraService::SensorPrivacyPolicy::getCameraPrivacyState() {
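+    // Register with the SensorPrivacyManager on first use so the cached state is populated.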
+    if (!mRegistered) {
+        registerWithSensorPrivacyManager();
+    }
+
+    Mutex::Autolock _l(mSensorPrivacyLock);
+    return mCameraPrivacyState;
+}
+
 bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled() {
     if (!hasCameraPrivacyFeature()) {
         return false;
@@ -4794,18 +5110,51 @@
     return mSpm.isToggleSensorPrivacyEnabled(SensorPrivacyManager::TOGGLE_SENSOR_CAMERA);
 }
 
+bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
+    if (!hasCameraPrivacyFeature()) {
+        return false;
+    }
+    return mSpm.isCameraPrivacyEnabled(packageName);
+}
+
 binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyChanged(
-    int toggleType __unused, int sensor __unused, bool enabled) {
+    int toggleType, int sensor, bool enabled) {
+    if ((toggleType == SensorPrivacyManager::TOGGLE_TYPE_UNKNOWN)
+            && (sensor == SensorPrivacyManager::TOGGLE_SENSOR_UNKNOWN)) {
+        {
+            Mutex::Autolock _l(mSensorPrivacyLock);
+            mSensorPrivacyEnabled = enabled;
+        }
+        // if sensor privacy is enabled then block all clients from accessing the camera
+        if (enabled) {
+            sp<CameraService> service = mService.promote();
+            if (service != nullptr) {
+                service->blockAllClients();
+            }
+        }
+    }
+    return binder::Status::ok();
+}
+
+binder::Status CameraService::SensorPrivacyPolicy::onSensorPrivacyStateChanged(
+    int, int sensor, int state) {
+    if (!flags::camera_privacy_allowlist()
+            || (sensor != SensorPrivacyManager::TOGGLE_SENSOR_CAMERA)) {
+        return binder::Status::ok();
+    }
     {
         Mutex::Autolock _l(mSensorPrivacyLock);
-        mSensorPrivacyEnabled = enabled;
+        mCameraPrivacyState = state;
+    }
+    sp<CameraService> service = mService.promote();
+    if (!service) {
+        return binder::Status::ok();
     }
     // if sensor privacy is enabled then block all clients from accessing the camera
-    if (enabled) {
-        sp<CameraService> service = mService.promote();
-        if (service != nullptr) {
-            service->blockAllClients();
-        }
+    if (state == SensorPrivacyManager::ENABLED) {
+        service->blockAllClients();
+    } else if (state == SensorPrivacyManager::ENABLED_EXCEPT_ALLOWLISTED_APPS) {
+        service->blockPrivacyEnabledClients();
     }
     return binder::Status::ok();
 }
@@ -4920,7 +5269,6 @@
     }
 }
 
-
 // ----------------------------------------------------------------------------
 //                  CameraClientManager
 // ----------------------------------------------------------------------------
@@ -5162,8 +5510,8 @@
 
     if (checkCallingPermission(toString16(sDumpPermission)) == false) {
         dprintf(fd, "Permission Denial: can't dump CameraService from pid=%d, uid=%d\n",
-                CameraThreadState::getCallingPid(),
-                CameraThreadState::getCallingUid());
+                getCallingPid(),
+                getCallingUid());
         return NO_ERROR;
     }
     bool locked = tryLock(mServiceLock);
@@ -5412,7 +5760,7 @@
       * binder driver
       */
     // PID here is approximate and can be wrong.
-    logClientDied(CameraThreadState::getCallingPid(), "Binder died unexpectedly");
+    logClientDied(getCallingPid(), "Binder died unexpectedly");
 
     // check torch client
     handleTorchClientBinderDied(who);
@@ -5451,6 +5799,36 @@
         return;
     }
 
+    if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
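+        // For cameras that belong to a virtual device, record the mapping from the
+        // HAL-visible camera id to the owning device id and its front/back mapped id.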
+        CameraMetadata cameraInfo;
+        status_t res = mCameraProviderManager->getCameraCharacteristics(
+                cameraId, false, &cameraInfo, false);
+        if (res != OK) {
+            ALOGW("%s: Not able to get camera characteristics for camera id %s",
+                  __FUNCTION__, cameraId.c_str());
+        } else {
+            int32_t deviceId = getDeviceId(cameraInfo);
+            if (deviceId != kDefaultDeviceId) {
+                const auto &lensFacingEntry = cameraInfo.find(ANDROID_LENS_FACING);
+                camera_metadata_enum_android_lens_facing_t androidLensFacing =
+                        static_cast<camera_metadata_enum_android_lens_facing_t>(
+                                lensFacingEntry.data.u8[0]);
+                std::string mappedCameraId;
+                if (androidLensFacing == ANDROID_LENS_FACING_BACK) {
+                    mappedCameraId = kVirtualDeviceBackCameraId;
+                } else if (androidLensFacing == ANDROID_LENS_FACING_FRONT) {
+                    mappedCameraId = kVirtualDeviceFrontCameraId;
+                } else {
+                    ALOGD("%s: Not adding entry for an external camera of a virtual device",
+                          __func__);
+                }
+                if (!mappedCameraId.empty()) {
+                    mVirtualDeviceCameraIdMapper.addCamera(cameraId, deviceId, mappedCameraId);
+                }
+            }
+        }
+    }
+
     // Collect the logical cameras without holding mStatusLock in updateStatus
     // as that can lead to a deadlock(b/162192331).
     auto logicalCameraIds = getLogicalCameras(cameraId);
@@ -5459,55 +5837,67 @@
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
             (const std::string& cameraId, StatusInternal status) {
+                // Get the device id and app-visible camera id for the given HAL-visible camera id.
+                auto [deviceId, mappedCameraId] =
+                        mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
 
-            if (status != StatusInternal::ENUMERATING) {
-                // Update torch status if it has a flash unit.
-                Mutex::Autolock al(mTorchStatusMutex);
-                TorchModeStatus torchStatus;
-                if (getTorchStatusLocked(cameraId, &torchStatus) !=
-                        NAME_NOT_FOUND) {
-                    TorchModeStatus newTorchStatus =
-                            status == StatusInternal::PRESENT ?
-                            TorchModeStatus::AVAILABLE_OFF :
-                            TorchModeStatus::NOT_AVAILABLE;
-                    if (torchStatus != newTorchStatus) {
-                        onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+                if (status != StatusInternal::ENUMERATING) {
+                    // Update torch status if it has a flash unit.
+                    Mutex::Autolock al(mTorchStatusMutex);
+                    TorchModeStatus torchStatus;
+                    if (getTorchStatusLocked(cameraId, &torchStatus) !=
+                            NAME_NOT_FOUND) {
+                        TorchModeStatus newTorchStatus =
+                                status == StatusInternal::PRESENT ?
+                                TorchModeStatus::AVAILABLE_OFF :
+                                TorchModeStatus::NOT_AVAILABLE;
+                        if (torchStatus != newTorchStatus) {
+                            onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
+                        }
                     }
                 }
-            }
 
-            Mutex::Autolock lock(mStatusListenerLock);
-            notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId,
-                    logicalCameraIds, deviceKind);
+                Mutex::Autolock lock(mStatusListenerLock);
+                notifyPhysicalCameraStatusLocked(mapToInterface(status), mappedCameraId,
+                        logicalCameraIds, deviceKind, deviceId);
 
-            for (auto& listener : mListenerList) {
-                bool isVendorListener = listener->isVendorListener();
-                if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
-                        listener->getListenerPid(), listener->getListenerUid())) {
-                    ALOGV("Skipping discovery callback for system-only camera device %s",
-                            cameraId.c_str());
-                    continue;
-                }
-                auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
-                        cameraId);
-                listener->handleBinderStatus(ret,
-                         "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
-                        __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
-                        ret.exceptionCode());
-                // Also trigger the callbacks for cameras that were remapped to the current
-                // cameraId for the specific package that this listener belongs to.
-                std::vector<std::string> remappedCameraIds =
-                        findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
-                for (auto& remappedCameraId : remappedCameraIds) {
-                    ret = listener->getListener()->onStatusChanged(
-                            mapToInterface(status), remappedCameraId);
+                for (auto& listener : mListenerList) {
+                    bool isVendorListener = listener->isVendorListener();
+                    if (shouldSkipStatusUpdates(deviceKind, isVendorListener,
+                            listener->getListenerPid(), listener->getListenerUid())) {
+                        ALOGV("Skipping discovery callback for system-only camera device %s",
+                              cameraId.c_str());
+                        continue;
+                    }
+
+                    auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
+                            mappedCameraId, deviceId);
                     listener->handleBinderStatus(ret,
-                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                            "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                             __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                             ret.exceptionCode());
+
+                    // Only cameras of the default device can be remapped to a different camera
+                    // (using remapCameraIds method), so do the following only if the camera is
+                    // associated with the default device.
+                    if (deviceId == kDefaultDeviceId) {
+                        // For the default device, also trigger the callbacks for cameras that were
+                        // remapped to the current cameraId for the specific package that this
+                        // listener belongs to.
+                        std::vector<std::string> remappedCameraIds =
+                                findOriginalIdsForRemappedCameraId(cameraId,
+                                        listener->getListenerUid());
+                        for (auto &remappedCameraId: remappedCameraIds) {
+                            ret = listener->getListener()->onStatusChanged(
+                                    mapToInterface(status), remappedCameraId, kDefaultDeviceId);
+                            listener->handleBinderStatus(ret,
+                                    "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
+                                    __FUNCTION__, listener->getListenerUid(),
+                                    listener->getListenerPid(), ret.exceptionCode());
+                        }
+                    }
                 }
-            }
-        });
+            });
 }
 
 void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
@@ -5524,6 +5914,10 @@
         state->setClientPackage(std::string());
     }
 
+    // Get the device id and app-visible camera id for the given HAL-visible camera id.
+    auto [deviceId, mappedCameraId] =
+            mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+
     Mutex::Autolock lock(mStatusListenerLock);
 
     for (const auto& it : mListenerList) {
@@ -5533,9 +5927,10 @@
 
         binder::Status ret;
         if (open) {
-            ret = it->getListener()->onCameraOpened(cameraId, clientPackageName);
+            ret = it->getListener()->onCameraOpened(mappedCameraId, clientPackageName,
+                    deviceId);
         } else {
-            ret = it->getListener()->onCameraClosed(cameraId);
+            ret = it->getListener()->onCameraClosed(mappedCameraId, deviceId);
         }
 
         it->handleBinderStatus(ret,
@@ -5628,7 +6023,7 @@
 
 void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
         const std::string& physicalCameraId, const std::list<std::string>& logicalCameraIds,
-        SystemCameraKind deviceKind) {
+        SystemCameraKind deviceKind, int32_t deviceId) {
     // mStatusListenerLock is expected to be locked
     for (const auto& logicalCameraId : logicalCameraIds) {
         for (auto& listener : mListenerList) {
@@ -5642,7 +6037,7 @@
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
-                    logicalCameraId, physicalCameraId);
+                    logicalCameraId, physicalCameraId, deviceId);
             listener->handleBinderStatus(ret,
                     "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
                     __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -5651,7 +6046,6 @@
     }
 }
 
-
 void CameraService::blockClientsForUid(uid_t uid) {
     const auto clients = mActiveClientManager.getAll();
     for (auto& current : clients) {
@@ -5676,6 +6070,23 @@
     }
 }
 
+void CameraService::blockPrivacyEnabledClients() {
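+    // Block every active client whose package currently has camera privacy enabled.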
+    const auto clients = mActiveClientManager.getAll();
+    for (auto& current : clients) {
+        if (current != nullptr) {
+            const auto basicClient = current->getValue();
+            if (basicClient.get() != nullptr) {
+                std::string pkgName = basicClient->getPackageName();
+                bool cameraPrivacyEnabled =
+                        mSensorPrivacyPolicy->isCameraPrivacyEnabled(toString16(pkgName));
+                if (cameraPrivacyEnabled) {
+                    basicClient->block();
+                }
+            }
+        }
+    }
+}
+
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
     if (!checkCallingPermission(toString16(sManageCameraPermission), nullptr, nullptr)) {
@@ -6024,7 +6435,7 @@
                  "        prints the monitored information in real time\n"
                  "        Hit return to exit\n"
                  "  clear clears all buffers storing information for watch command");
-  return BAD_VALUE;
+    return BAD_VALUE;
 }
 
 status_t CameraService::startWatchingTags(const Vector<String16> &args, int outFd) {
@@ -6411,9 +6822,9 @@
         mServiceLock.unlock();
 
         // Clear caller identity temporarily so client disconnect PID checks work correctly
-        int64_t token = CameraThreadState::clearCallingIdentity();
+        int64_t token = clearCallingIdentity();
         clientSp->disconnect();
-        CameraThreadState::restoreCallingIdentity(token);
+        restoreCallingIdentity(token);
 
         // Reacquire mServiceLock
         mServiceLock.lock();
@@ -6432,4 +6843,4 @@
     mInjectionStatusListener->removeListener();
 }
 
-}; // namespace android
+} // namespace android
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 1487013..8dbd591 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -34,6 +34,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
+#include <binder/IPermissionController.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
@@ -53,12 +54,15 @@
 #include "utils/ClientManager.h"
 #include "utils/IPCTransport.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
+#include "utils/VirtualDeviceCameraIdMapper.h"
 
 #include <set>
 #include <string>
 #include <list>
 #include <map>
 #include <memory>
+#include <mutex>
 #include <optional>
 #include <utility>
 #include <unordered_map>
@@ -77,7 +81,8 @@
     public virtual ::android::hardware::BnCameraService,
     public virtual IBinder::DeathRecipient,
     public virtual CameraProviderManager::StatusListener,
-    public virtual IServiceManager::LocalRegistrationCallback
+    public virtual IServiceManager::LocalRegistrationCallback,
+    public AttributionAndPermissionUtilsEncapsulator
 {
     friend class BinderService<CameraService>;
     friend class CameraOfflineSessionClient;
@@ -119,7 +124,9 @@
                         // Non-null arguments for cameraServiceProxyWrapper should be provided for
                         // testing purposes only.
                         CameraService(std::shared_ptr<CameraServiceProxyWrapper>
-                                cameraServiceProxyWrapper = nullptr);
+                                cameraServiceProxyWrapper = nullptr,
+                                std::shared_ptr<AttributionAndPermissionUtils>
+                                attributionAndPermissionUtils = nullptr);
     virtual             ~CameraService();
 
     /////////////////////////////////////////////////////////////////////
@@ -145,14 +152,17 @@
     /////////////////////////////////////////////////////////////////////
     // ICameraService
     // IMPORTANT: All binder calls that deal with logicalCameraId should use
-    // resolveCameraId(logicalCameraId) to arrive at the correct cameraId to
-    // perform the operation on (in case of Id Remapping).
-    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t* numCameras);
+    // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
+    // cameraId to perform the operation on (in case of Id Remapping, or in case of contexts
+    // associated with virtual devices).
+    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t deviceId,
+            int32_t devicePolicy, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
-            hardware::CameraInfo* cameraInfo) override;
+            int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
+            int targetSdkVersion, bool overrideToPortrait, int32_t deviceId,
+            int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
             hardware::camera2::params::VendorTagDescriptor* desc);
@@ -163,15 +173,15 @@
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
             int32_t cameraId, const std::string& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode,
-            /*out*/
-            sp<hardware::ICamera>* device) override;
+            bool overrideToPortrait, bool forceSlowJpegMode, int32_t deviceId,
+            int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
             const std::string& cameraId,
             const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
             int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
+            int32_t deviceId, int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -181,6 +191,7 @@
     virtual binder::Status    removeListener(
             const sp<hardware::ICameraServiceListener>& listener);
 
+    // TODO(b/291736219): This is to be made device-aware.
     virtual binder::Status getConcurrentCameraIds(
         /*out*/
         std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination>* concurrentCameraIds);
@@ -195,13 +206,14 @@
             std::string* parameters);
 
     virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
-            const sp<IBinder>& clientBinder);
+            const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
 
     virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
-            int32_t torchStrength, const sp<IBinder>& clientBinder);
+            int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+            int32_t devicePolicy);
 
-    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
-            int32_t* torchStrength);
+    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+            int32_t devicePolicy, int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
             const std::vector<int32_t>& args);
@@ -239,14 +251,20 @@
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
 
     virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
+            int32_t deviceId, int32_t devicePolicy,
             /*out*/
             hardware::camera2::impl::CameraMetadataNative* request);
 
     virtual binder::Status isSessionConfigurationWithParametersSupported(
-            const std::string& cameraId,
+            const std::string& cameraId, int targetSdkVersion,
             const SessionConfiguration& sessionConfiguration,
-            /*out*/
-            bool* supported);
+            int32_t deviceId, int32_t devicePolicy,
+            /*out*/ bool* supported);
+
+    virtual binder::Status getSessionCharacteristics(
+            const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
+            const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+            int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
     // Extra permissions checks
     virtual status_t    onTransact(uint32_t code, const Parcel& data,
@@ -309,10 +327,20 @@
     // Shared utilities
     static binder::Status filterGetInfoErrorCode(status_t err);
 
+    /**
+     * Returns true if the device is an automotive device and cameraId is a system-only
+     * camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT, or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
+
     /////////////////////////////////////////////////////////////////////
     // CameraClient functionality
 
-    class BasicClient : public virtual RefBase {
+    class BasicClient :
+        public virtual RefBase,
+        public AttributionAndPermissionUtilsEncapsulator {
     friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
@@ -423,6 +451,7 @@
     protected:
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
+                std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                 const std::string& clientPackageName,
                 bool nativeClient,
                 const std::optional<std::string>& clientFeatureId,
@@ -536,6 +565,7 @@
         // Interface used by CameraService
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
+                std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                 const std::string& clientPackageName,
                 bool systemNativeClient,
                 const std::optional<std::string>& clientFeatureId,
@@ -639,13 +669,6 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
-    /**
-     * Returns true if the device is an automotive device and cameraId is system
-     * only camera which has characteristic AUTOMOTIVE_LOCATION value as either
-     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT,AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
-     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
-     */
-    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
 
     // TODO: b/263304156 update this to make use of a death callback for more
     // robust/fault tolerant logging
@@ -661,27 +684,26 @@
         return activityManager;
     }
 
+    static const sp<IPermissionController>& getPermissionController() {
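+        // Cache the permission controller per thread and re-fetch it from the service
+        // manager if the cached binder is no longer alive.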
+        static const char* kPermissionControllerService = "permission";
+        static thread_local sp<IPermissionController> sPermissionController = nullptr;
+
+        if (sPermissionController == nullptr ||
+                !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
+            sp<IServiceManager> sm = defaultServiceManager();
+            sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
+            if (binder == nullptr) {
+                ALOGE("%s: Could not get permission service", __FUNCTION__);
+                sPermissionController = nullptr;
+            } else {
+                sPermissionController = interface_cast<IPermissionController>(binder);
+            }
+        }
+
+        return sPermissionController;
+    }
+
     /**
-     * Pre-grants the permission if the attribution source uid is for an automotive
-     * privileged client. Otherwise uses system service permission checker to check
-     * for the appropriate permission. If this function is called for accessing a specific
-     * camera,then the cameraID must not be empty. CameraId is used only in case of automotive
-     * privileged client so that permission is pre-granted only to access system camera device
-     * which is located outside of the vehicle body frame because camera located inside the vehicle
-     * cabin would need user permission.
-     */
-    bool checkPermission(const std::string& cameraId, const std::string& permission,
-            const content::AttributionSourceState& attributionSource, const std::string& message,
-            int32_t attributedOpCode) const;
-
-    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
-            const;
-
-    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
-            int callingUid) const;
-
-    bool hasCameraPermissions() const;
-   /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -865,19 +887,27 @@
     // prevented from accessing the camera.
     class SensorPrivacyPolicy : public hardware::BnSensorPrivacyListener,
             public virtual IBinder::DeathRecipient,
-            public virtual IServiceManager::LocalRegistrationCallback {
+            public virtual IServiceManager::LocalRegistrationCallback,
+            public AttributionAndPermissionUtilsEncapsulator {
         public:
-            explicit SensorPrivacyPolicy(wp<CameraService> service)
-                    : mService(service), mSensorPrivacyEnabled(false), mRegistered(false) {}
+            explicit SensorPrivacyPolicy(wp<CameraService> service,
+                    std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+                    : AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
+                      mService(service),
+                      mSensorPrivacyEnabled(false),
+                      mCameraPrivacyState(SensorPrivacyManager::DISABLED), mRegistered(false) {}
 
             void registerSelf();
             void unregisterSelf();
 
             bool isSensorPrivacyEnabled();
             bool isCameraPrivacyEnabled();
+            int getCameraPrivacyState();
+            bool isCameraPrivacyEnabled(const String16& packageName);
 
             binder::Status onSensorPrivacyChanged(int toggleType, int sensor,
                                                   bool enabled);
+            binder::Status onSensorPrivacyStateChanged(int toggleType, int sensor, int state);
 
             // Implementation of IServiceManager::LocalRegistrationCallback
             virtual void onServiceRegistration(const String16& name,
@@ -890,6 +920,7 @@
             wp<CameraService> mService;
             Mutex mSensorPrivacyLock;
             bool mSensorPrivacyEnabled;
+            int mCameraPrivacyState;
             bool mRegistered;
 
             bool hasCameraPrivacyFeature();
@@ -926,6 +957,9 @@
             const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
             /*out*/int& originalClientPid) const;
 
+    bool isCameraPrivacyEnabled(const String16& packageName, const std::string& cameraId,
+            int clientPid, int clientUid);
+
     // Handle active client evictions, and update service state.
     // Only call with mServiceLock held.
     status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
@@ -996,6 +1030,10 @@
     // Adds client logs during closed session to the file pointed by fd.
     void dumpClosedSessionClientLogs(int fd, const std::string& cameraId);
 
+    binder::Status isSessionConfigurationWithParametersSupportedUnsafe(
+            const std::string& cameraId, const SessionConfiguration& sessionConfiguration,
+            bool overrideForPerfClass, /*out*/ bool* supported);
+
     // Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock
     std::map<std::string, std::shared_ptr<CameraState>> mCameraStates;
 
@@ -1019,10 +1057,11 @@
             /* out */ TCameraIdRemapping* cameraIdRemappingMap);
 
     /**
-     * Resolve the (potentially remapped) camera Id to use for packageName.
+     * Resolve the (potentially remapped) camera id to use for packageName for the default device
+     * context.
      *
-     * This returns the Camera Id to use in case inputCameraId was remapped to a
-     * different Id for the given packageName. Otherwise, it returns the inputCameraId.
+     * This returns the Camera id to use in case inputCameraId was remapped to a
+     * different id for the given packageName. Otherwise, it returns the inputCameraId.
      *
      * If the packageName is not provided, it will be inferred from the clientUid.
      */
@@ -1032,6 +1071,26 @@
             const std::string& packageName = "");
 
     /**
+     * Resolve the (potentially remapped) camera id for the given input camera id and the given
+     * device id and device policy (for the device associated with the context of the caller).
+     *
+     * For any context associated with the default device or a virtual device with default camera
+     * policy, this will return the actual camera id (in case inputCameraId was remapped using
+     * the remapCameraIds method).
+     *
+     * For any context associated with a virtual device with custom camera policy, this will return
+     * the actual camera id if inputCameraId corresponds to the mapped id of a virtual camera
+     * (for virtual devices with custom camera policy, the back and front virtual cameras of that
+     * device would have 0 and 1 respectively as their mapped camera id).
+     */
+    std::optional<std::string> resolveCameraId(
+            const std::string& inputCameraId,
+            int32_t deviceId,
+            int32_t devicePolicy,
+            int clientUid,
+            const std::string& packageName = "");
+
+    /**
      * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
      */
     void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
@@ -1096,13 +1155,13 @@
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
-    std::string cameraIdIntToStr(int cameraIdInt);
+    std::string cameraIdIntToStr(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
 
     /**
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
-    std::string cameraIdIntToStrLocked(int cameraIdInt);
+    std::string cameraIdIntToStrLocked(int cameraIdInt, int32_t deviceId, int32_t devicePolicy);
 
     /**
      * Remove a single client corresponding to the given camera id from the list of active clients.
@@ -1302,6 +1361,8 @@
      *
      * This method must be idempotent.
      * This method acquires mStatusLock and mStatusListenerLock.
+     * For any virtual camera, this method must pass its mapped camera id and device id to
+     * ICameraServiceListeners (using mVirtualDeviceCameraIdMapper).
      */
     void updateStatus(StatusInternal status,
             const std::string& cameraId,
@@ -1355,7 +1416,8 @@
     // notify physical camera status when the physical camera is public.
     // Expects mStatusListenerLock to be locked.
     void notifyPhysicalCameraStatusLocked(int32_t status, const std::string& physicalCameraId,
-            const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind);
+            const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind,
+            int32_t virtualDeviceId);
 
     // get list of logical cameras which are backed by physicalCameraId
     std::list<std::string> getLogicalCameras(const std::string& physicalCameraId);
@@ -1385,6 +1447,9 @@
     // Blocks all active clients.
     void blockAllClients();
 
+    // Blocks clients for which camera privacy is enabled.
+    void blockPrivacyEnabledClients();
+
     // Overrides the UID state as if it is idle
     status_t handleSetUidState(const Vector<String16>& args, int err);
 
@@ -1467,6 +1532,12 @@
     // responsibility to acquire mLogLock before calling these functions.
     bool isClientWatchedLocked(const BasicClient *client);
 
+    // Filters out fingerprintable keys if the calling process does not have CAMERA permission.
+    // Note: the caller should ensure that shouldRejectSystemCameraConnection is checked
+    // for the calling process before calling this function.
+    binder::Status filterSensitiveMetadataIfNeeded(const std::string& cameraId,
+                                                   CameraMetadata* metadata);
+
     /**
      * Get the current system time as a formatted string.
      */
@@ -1588,6 +1659,8 @@
     int64_t mDeviceState;
 
     void updateTorchUidMapLocked(const std::string& cameraId, int uid);
+
+    VirtualDeviceCameraIdMapper mVirtualDeviceCameraIdMapper;
 };
 
 } // namespace android
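
The getPermissionController() helper added above lazily caches the binder handle for the "permission" service in a thread_local and re-resolves it only when the cached binder is no longer alive. A minimal standalone sketch of that caching pattern, written against a hypothetical IFooService interface (the interface name and the service name "foo" are assumptions, not part of this change):

    // Sketch only: per-thread cached service handle with a liveness check.
    static const sp<IFooService>& getFooService() {
        static thread_local sp<IFooService> sService = nullptr;
        if (sService == nullptr || !IInterface::asBinder(sService)->isBinderAlive()) {
            // checkService() returns null immediately instead of blocking when the
            // service is not (yet) registered.
            sp<IBinder> binder = defaultServiceManager()->checkService(String16("foo"));
            sService = (binder == nullptr) ? nullptr : interface_cast<IFooService>(binder);
        }
        return sService;
    }

Caching per thread avoids serializing permission checks on a shared lock, at the cost of one cached handle per calling thread.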
diff --git a/services/camera/libcameraservice/TEST_MAPPING b/services/camera/libcameraservice/TEST_MAPPING
index ca6cc58..6257aee 100644
--- a/services/camera/libcameraservice/TEST_MAPPING
+++ b/services/camera/libcameraservice/TEST_MAPPING
@@ -4,6 +4,17 @@
       "name": "cameraservice_test"
     }
   ],
+  "postsubmit": [
+    {
+      "name": "CtsVirtualDevicesCameraTestCases",
+      "options": [
+        {
+          "exclude-annotation": "androidx.test.filters.FlakyTest"
+        }
+      ],
+      "keywords": ["primary-device"]
+    }
+  ],
   "imports": [
     {
       "path": "frameworks/av/camera"
diff --git a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
index 954cb8b..9e6a925 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraDeviceUser.cpp
@@ -20,6 +20,7 @@
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/device/CaptureMetadataInfo.h>
 #include <android-base/properties.h>
+#include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::device::implementation {
 
@@ -56,7 +57,7 @@
 AidlCameraDeviceUser::AidlCameraDeviceUser(const sp<UICameraDeviceUser>& deviceRemote):
       mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 bool AidlCameraDeviceUser::initDevice() {
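
Both AIDL frontends now obtain the vendor API level via the shared getVNDKVersionFromProp() helper instead of reading ro.vndk.version directly. A rough sketch of what such a helper could look like, assuming it is backed by ro.board.api_level (as the updated filterVndkKeys comment below suggests) and falls back to the caller-supplied default; the actual utils/Utils.h implementation may differ:

    // Assumption-based sketch; not the actual utils/Utils.h code.
    #include <android-base/properties.h>

    static int getVndkVersionFromPropSketch(int defaultVersion) {
        // Returns defaultVersion (e.g. __ANDROID_API_FUTURE__) when the property is
        // missing or not numeric.
        return android::base::GetIntProperty("ro.board.api_level", defaultVersion);
    }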
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 8cd7d1f..cb11023 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -26,7 +26,9 @@
 #include <android/binder_ibinder.h>
 #include <android/binder_manager.h>
 #include <binder/Status.h>
+#include <camera/CameraUtils.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
 
@@ -79,7 +81,7 @@
 
 AidlCameraService::AidlCameraService(::android::CameraService* cameraService):
       mCameraService(cameraService) {
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 ScopedAStatus AidlCameraService::getCameraCharacteristics(const std::string& in_cameraId,
                                                           SCameraMetadata* _aidl_return) {
@@ -89,6 +91,8 @@
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
                                                            /* overrideToPortrait= */ false,
+                                                           kDefaultDeviceId,
+                                                           /* devicePolicy= */ 0,
                                                            &cameraMetadata);
     if (!ret.isOk()) {
         if (ret.exceptionCode() != EX_SERVICE_SPECIFIC) {
@@ -147,6 +151,8 @@
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
             /* overrideToPortrait= */ false,
+            kDefaultDeviceId,
+            /* devicePolicy= */ 0,
             &unstableDevice);
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device: %s", __FUNCTION__,
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
index d7ab0d9..dc5c7f5 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -18,6 +18,7 @@
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/common/Status.h>
 #include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -28,7 +29,10 @@
 using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
 
 binder::Status AidlCameraServiceListener::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+        int32_t status, const std::string& cameraId, int32_t deviceId) {
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
     auto ret = mBase->onStatusChanged(sStatus, cameraId);
     LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
@@ -37,7 +41,10 @@
 
 binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
         int32_t status, const std::string& cameraId,
-        const std::string& physicalCameraId) {
+        const std::string& physicalCameraId, int32_t deviceId) {
+    if (deviceId != kDefaultDeviceId) {
+        return binder::Status::ok();
+    }
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
 
     auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
@@ -46,20 +53,22 @@
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
-    int32_t, const std::string&) {
+    [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, int32_t) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
-    const std::string&, int32_t) {
+    [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
     // We don't implement onTorchStrengthLevelChanged
     return binder::Status::ok();
 }
+
 status_t AidlCameraServiceListener::linkToDeath(const sp<DeathRecipient>& recipient, void* cookie,
                                                 uint32_t flags) {
     return mDeathPipe.linkToDeath(recipient, cookie, flags);
 }
+
 status_t AidlCameraServiceListener::unlinkToDeath(const wp<DeathRecipient>& recipient, void* cookie,
                                                   uint32_t flags,
                                                   wp<DeathRecipient>* outRecipient) {
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index 6483fe1..a7c32e3 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -45,25 +45,28 @@
     ~AidlCameraServiceListener() = default;
 
     ::android::binder::Status onStatusChanged(int32_t status,
-            const std::string& cameraId) override;
+            const std::string& cameraId, int32_t deviceId) override;
     ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
             const std::string& cameraId,
-            const std::string& physicalCameraId) override;
+            const std::string& physicalCameraId,
+            int32_t deviceId) override;
 
     ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const std::string& cameraId) override;
+            int32_t status, const std::string& cameraId, int32_t deviceId) override;
     ::android::binder::Status onTorchStrengthLevelChanged(
-            const std::string& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
     binder::Status onCameraAccessPrioritiesChanged() override {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageId*/) override {
+    binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
-    binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
+    binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index f5d68eb..48d78e1 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -17,12 +17,14 @@
 #define LOG_TAG "AidlUtils"
 
 #include <aidl/AidlUtils.h>
+#include <aidl/ExtensionMetadataTags.h>
+#include <aidl/SessionCharacteristicsTags.h>
 #include <aidl/VndkVersionMetadataTags.h>
 #include <aidlcommonsupport/NativeHandle.h>
+#include <camera/StringUtils.h>
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
-#include <camera/StringUtils.h>
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -310,8 +312,8 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
     if (vndkVersion == __ANDROID_API_FUTURE__) {
-        // VNDK version in ro.vndk.version is a version code-name that
-        // corresponds to the current version.
+        // VNDK version derived from ro.board.api_level is a version code-name that
+        // corresponds to the current SDK version.
         return OK;
     }
     const auto &apiLevelToKeys =
@@ -333,4 +335,90 @@
     return OK;
 }
 
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+                                    int queryVersion) {
+    // Ensure the vendor IDs are the same before attempting
+    // anything else. If the vendor IDs differ, we cannot safely copy the characteristics.
+    if (from.getVendorId() != to->getVendorId()) {
+        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %lu; To: %lu",
+              __FUNCTION__, from.getVendorId(), to->getVendorId());
+        return BAD_VALUE;
+    }
+
+    // Allow public tags according to the queryVersion
+    std::unordered_set<uint32_t> validPublicTags;
+    auto last = api_level_to_session_characteristic_keys.upper_bound(queryVersion);
+    for (auto it = api_level_to_session_characteristic_keys.begin(); it != last; it++) {
+        validPublicTags.insert(it->second.cbegin(), it->second.cend());
+    }
+
+    const camera_metadata_t* src = from.getAndLock();
+    camera_metadata_ro_entry_t entry{};
+    for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
+        int ret = get_camera_metadata_ro_entry(src, i, &entry);
+        if (ret != OK) {
+            ALOGE("%s: Could not fetch entry at index %lu. Error: %d", __FUNCTION__, i, ret);
+            from.unlock(src);
+            return BAD_VALUE;
+        }
+
+        if (entry.tag < (uint32_t)VENDOR_SECTION_START &&
+                validPublicTags.find(entry.tag) == validPublicTags.end()) {
+            ALOGI("%s: Session Characteristics contains tag %s but not supported by query version "
+                  "(%d)",
+                  __FUNCTION__, get_camera_metadata_tag_name(entry.tag), queryVersion);
+            continue;
+        }
+
+        // The entry is either a vendor tag, or a valid session characteristic key.
+        // Copy over the value
+        to->update(entry);
+    }
+    from.unlock(src);
+    return OK;
+}
+
+bool areExtensionKeysSupported(const CameraMetadata& metadata) {
+    auto requestKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+    if (requestKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    auto resultKeys = metadata.find(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
+    if (resultKeys.count == 0) {
+        ALOGE("%s: No ANDROID_REQUEST_AVAILABLE_RESULT_KEYS entries!", __FUNCTION__);
+        return false;
+    }
+
+    for (const auto& extensionKey : extension_metadata_keys) {
+        if (std::find(requestKeys.data.i32, requestKeys.data.i32 + requestKeys.count, extensionKey)
+                != requestKeys.data.i32 + requestKeys.count) {
+            return true;
+        }
+
+        if (std::find(resultKeys.data.i32, resultKeys.data.i32 + resultKeys.count, extensionKey)
+                != resultKeys.data.i32 + resultKeys.count) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/) {
+    if (metadata == nullptr) {
+        return BAD_VALUE;
+    }
+
+    for (const auto& key : extension_metadata_keys) {
+        status_t res = metadata->erase(key);
+        if (res != OK) {
+            ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+            return res;
+        }
+    }
+    return OK;
+}
+
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
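
A hedged sketch of how the three helpers introduced above could be combined: copy only the session characteristic keys allowed by the client's query version, then strip extension keys when the device's static characteristics do not advertise extension support. The wrapper name and error handling are illustrative, not part of this change:

    // Illustrative sketch combining the new AidlUtils helpers.
    status_t filterSessionCharsForClient(const CameraMetadata& staticChars,
                                         const CameraMetadata& sessionChars,
                                         int queryVersion,
                                         CameraMetadata* out /*out*/) {
        // 'out' must already carry the same vendor tag id as 'sessionChars', otherwise
        // copySessionCharacteristics() rejects the copy with BAD_VALUE.
        status_t res = copySessionCharacteristics(sessionChars, out, queryVersion);
        if (res != OK) return res;
        // If the device does not list any extension keys as available, make sure none
        // leak into the filtered result either.
        if (!areExtensionKeysSupported(staticChars)) {
            res = filterExtensionKeys(out);
        }
        return res;
    }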
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.h b/services/camera/libcameraservice/aidl/AidlUtils.h
index c89d7ff..92e878e 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.h
+++ b/services/camera/libcameraservice/aidl/AidlUtils.h
@@ -122,6 +122,12 @@
 
 status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
 
+status_t copySessionCharacteristics(const CameraMetadata& from, CameraMetadata* to,
+                                    int queryVersion);
+
+bool areExtensionKeysSupported(const CameraMetadata& metadata);
+
+status_t filterExtensionKeys(CameraMetadata* metadata /*out*/);
 } // namespace android::hardware::cameraservice::utils::conversion::aidl
 
 #endif  // FRAMEWORKS_AV_SERVICES_CAMERA_LIBCAMERASERVICE_AIDL_AIDLUTILS_H_
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
new file mode 100644
index 0000000..86af36c
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from extensions_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Camera extension metadata keys. Used for detecting whether a device advertises extension
+ * support and for filtering extension-specific keys out of metadata returned to vendor clients.
+ */
+std::vector<camera_metadata_tag> extension_metadata_keys{
+            ANDROID_EXTENSION_STRENGTH,
+            ANDROID_EXTENSION_CURRENT_TYPE,
+            ANDROID_EFV_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_AUTO_ZOOM,
+            ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+            ANDROID_EFV_STABILIZATION_MODE,
+            ANDROID_EFV_TRANSLATE_VIEWPORT,
+            ANDROID_EFV_ROTATE_VIEWPORT,
+            ANDROID_EFV_PADDING_REGION,
+            ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+            ANDROID_EFV_TARGET_COORDINATES,
+};
diff --git a/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
new file mode 100644
index 0000000..cefb8a6
--- /dev/null
+++ b/services/camera/libcameraservice/aidl/SessionCharacteristicsTags.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from session_characteristics_tags.mako. To be included in
+ * libcameraservice only by aidl/AidlUtils.cpp.
+ */
+
+/**
+ * Mapping from INFO_SESSION_CONFIGURATION_QUERY_VERSION value to the session characteristic
+ * keys introduced at that version.
+ */
+std::map<int, std::vector<camera_metadata_tag>> api_level_to_session_characteristic_keys {
+        {35,
+         {
+                 ANDROID_CONTROL_ZOOM_RATIO_RANGE,
+                 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+         }},
+};
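
copySessionCharacteristics() in AidlUtils.cpp consumes this map by collecting every key whose introduction version is at or below the client's query version. A small, self-contained sketch of that lookup over a map shaped like the one above:

    // Sketch: gather all tags visible at or below queryVersion.
    std::unordered_set<uint32_t> visibleTags(
            const std::map<int, std::vector<camera_metadata_tag>>& keysByVersion,
            int queryVersion) {
        std::unordered_set<uint32_t> tags;
        // upper_bound() yields the first entry strictly above queryVersion, so the
        // half-open range [begin, last) covers every version up to and including it.
        auto last = keysByVersion.upper_bound(queryVersion);
        for (auto it = keysByVersion.begin(); it != last; ++it) {
            tags.insert(it->second.begin(), it->second.end());
        }
        return tags;
    }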
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index e403b97..7965474 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -78,6 +78,7 @@
           ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
           ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
           ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
           ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
           ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
           ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
@@ -112,6 +113,15 @@
           ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
           ANDROID_CONTROL_SETTINGS_OVERRIDE,
           ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
+          ANDROID_EFV_AUTO_ZOOM,
+          ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
+          ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_PADDING_REGION,
+          ANDROID_EFV_PADDING_ZOOM_FACTOR,
+          ANDROID_EFV_ROTATE_VIEWPORT,
+          ANDROID_EFV_STABILIZATION_MODE,
+          ANDROID_EFV_TARGET_COORDINATES,
+          ANDROID_EFV_TRANSLATE_VIEWPORT,
           ANDROID_EXTENSION_CURRENT_TYPE,
           ANDROID_EXTENSION_STRENGTH,
           ANDROID_FLASH_STRENGTH_LEVEL,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index caa6424..8ec2e4f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -38,7 +38,6 @@
 #include "api1/client2/CallbackProcessor.h"
 #include "api1/client2/ZslProcessor.h"
 #include "device3/RotateAndCropMapper.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
 #define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
@@ -56,6 +55,7 @@
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         const std::optional<std::string>& clientFeatureId,
         const std::string& cameraDeviceId,
@@ -68,7 +68,8 @@
         bool overrideForPerfClass,
         bool overrideToPortrait,
         bool forceSlowJpegMode):
-        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper, clientPackageName,
+        Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
+                attributionAndPermissionUtils, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
                 clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
@@ -444,7 +445,7 @@
 
     binder::Status res = binder::Status::ok();
     // Allow both client and the cameraserver to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (callingPid != mClientPid && callingPid != mServicePid) return res;
 
     if (mDevice == 0) return res;
@@ -513,14 +514,14 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
 
-    if (mClientPid != 0 && CameraThreadState::getCallingPid() != mClientPid) {
+    if (mClientPid != 0 && getCallingPid() != mClientPid) {
         ALOGE("%s: Camera %d: Connection attempt from pid %d; "
                 "current locked to pid %d", __FUNCTION__,
-                mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+                mCameraId, getCallingPid(), mClientPid);
         return BAD_VALUE;
     }
 
-    mClientPid = CameraThreadState::getCallingPid();
+    mClientPid = getCallingPid();
 
     mRemoteCallback = client;
     mSharedCameraCallbacks = client;
@@ -533,16 +534,16 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
     ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
 
     if (mClientPid == 0) {
-        mClientPid = CameraThreadState::getCallingPid();
+        mClientPid = getCallingPid();
         return OK;
     }
 
-    if (mClientPid != CameraThreadState::getCallingPid()) {
+    if (mClientPid != getCallingPid()) {
         ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d",
-                __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+                __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
         return EBUSY;
     }
 
@@ -554,9 +555,9 @@
     ALOGV("%s: E", __FUNCTION__);
     Mutex::Autolock icl(mBinderSerializationLock);
     ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
 
-    if (mClientPid == CameraThreadState::getCallingPid()) {
+    if (mClientPid == getCallingPid()) {
         SharedParameters::Lock l(mParameters);
         if (l.mParameters.state == Parameters::RECORD ||
                 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
@@ -570,7 +571,7 @@
     }
 
     ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d",
-            __FUNCTION__, mCameraId, CameraThreadState::getCallingPid(), mClientPid);
+            __FUNCTION__, mCameraId, getCallingPid(), mClientPid);
     return EBUSY;
 }
 
@@ -1460,6 +1461,11 @@
     int triggerId;
     {
         SharedParameters::Lock l(mParameters);
+        if (l.mParameters.state == Parameters::DISCONNECTED) {
+            ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
+            return INVALID_OPERATION;
+        }
+
         // Canceling does nothing in FIXED or INFINITY modes
         if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
                 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
@@ -1644,7 +1650,7 @@
     ALOGV("%s: Camera %d", __FUNCTION__, mCameraId);
     Mutex::Autolock icl(mBinderSerializationLock);
     // The camera service can unconditionally get the parameters at all times
-    if (CameraThreadState::getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
+    if (getCallingPid() != mServicePid && checkPid(__FUNCTION__) != OK) return String8();
 
     SharedParameters::ReadLock l(mParameters);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2cb7af0..2654a25 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,6 +103,7 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             const std::optional<std::string>& clientFeatureId,
             const std::string& cameraDeviceId,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3488629..105d04f 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -20,7 +20,6 @@
 
 #include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
-#include <utils/CameraThreadState.h>
 #include <utils/Log.h>
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
@@ -61,6 +60,7 @@
 CameraDeviceClientBase::CameraDeviceClientBase(
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -74,6 +74,7 @@
         bool overrideToPortrait) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
+            attributionAndPermissionUtils,
             clientPackageName,
             systemNativeClient,
             clientFeatureId,
@@ -92,6 +93,7 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -104,7 +106,8 @@
         bool overrideForPerfClass,
         bool overrideToPortrait,
         const std::string& originalCameraId) :
-    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
+    Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
+            attributionAndPermissionUtils, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
             overrideToPortrait),
@@ -534,27 +537,28 @@
 
         // Save certain CaptureRequest settings
         if (!request.mUserTag.empty()) {
-            mUserTag = request.mUserTag;
+            mRunningSessionStats.mUserTag = request.mUserTag;
         }
         camera_metadata_entry entry =
                 physicalSettingsList.begin()->metadata.find(
                         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
         if (entry.count == 1) {
-            mVideoStabilizationMode = entry.data.u8[0];
+            mRunningSessionStats.mVideoStabilizationMode = entry.data.u8[0];
         }
-        if (flags::log_ultrawide_usage()) {
+
+        if (!mRunningSessionStats.mUsedUltraWide && flags::log_ultrawide_usage()) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_ZOOM_RATIO);
             if (entry.count == 1 && entry.data.f[0] < 1.0f ) {
-                mUsedUltraWide = true;
+                mRunningSessionStats.mUsedUltraWide = true;
             }
         }
-        if (!mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
+        if (!mRunningSessionStats.mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_SETTINGS_OVERRIDE);
             if (entry.count == 1 && entry.data.i32[0] ==
                     ANDROID_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
-                mUsedSettingsOverrideZoom = true;
+                mRunningSessionStats.mUsedSettingsOverrideZoom = true;
             }
         }
     }
@@ -777,60 +781,6 @@
     return res;
 }
 
-binder::Status CameraDeviceClient::getSessionCharacteristics(
-        const SessionConfiguration& sessionConfiguration,
-        /*out*/
-        hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) {
-    ATRACE_CALL();
-    binder::Status res;
-    status_t ret = OK;
-    if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
-
-    Mutex::Autolock icl(mBinderSerializationLock);
-
-    if (!mDevice.get()) {
-        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
-    }
-
-    auto operatingMode = sessionConfiguration.getOperatingMode();
-    res = SessionConfigurationUtils::checkOperatingMode(operatingMode, mDevice->info(),
-            mCameraIdStr);
-    if (!res.isOk()) {
-        return res;
-    }
-
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {
-          return mDevice->infoPhysical(id);};
-    ret = mProviderManager->getSessionCharacteristics(mCameraIdStr.c_str(),
-            sessionConfiguration, mOverrideForPerfClass, getMetadata,
-            sessionCharacteristics);
-
-    switch (ret) {
-        case OK:
-            // Expected, do nothing.
-            break;
-        case INVALID_OPERATION: {
-                std::string msg = fmt::sprintf(
-                        "Camera %s: Session characteristics query not supported!",
-                        mCameraIdStr.c_str());
-                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
-            }
-
-            break;
-        default: {
-                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", mCameraIdStr.c_str(),
-                        strerror(-ret), ret);
-                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.c_str());
-            }
-    }
-
-    return res;
-}
-
 binder::Status CameraDeviceClient::deleteStream(int streamId) {
     ATRACE_CALL();
     ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId);
@@ -942,6 +892,11 @@
 
     Mutex::Autolock icl(mBinderSerializationLock);
 
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
+
     const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
     size_t numBufferProducers = bufferProducers.size();
@@ -958,7 +913,7 @@
     bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();
 
     res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
-            outputConfiguration.getSurfaceType());
+            outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
     if (!res.isOk()) {
         return res;
     }
@@ -1001,7 +956,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                 isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
         if (!res.isOk())
             return res;
@@ -1114,6 +1069,10 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
 
     // Infer the surface info for deferred surface stream creation.
     width = outputConfiguration.getWidth();
@@ -1306,6 +1265,10 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
 
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
@@ -1373,7 +1336,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                 /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                 mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
         if (!res.isOk())
             return res;
 
@@ -1686,6 +1649,11 @@
 
     Mutex::Autolock icl(mBinderSerializationLock);
 
+    if (!outputConfiguration.isComplete()) {
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                "OutputConfiguration isn't valid!");
+    }
+
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
     const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1751,7 +1719,7 @@
         res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                 true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                 mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
-                streamUseCase, timestampBase, mirrorMode, colorSpace);
+                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
 
         if (!res.isOk())
             return res;
@@ -1954,9 +1922,9 @@
     sp<CameraOfflineSessionClient> offlineClient;
     if (offlineSession.get() != nullptr) {
         offlineClient = new CameraOfflineSessionClient(sCameraService,
-                offlineSession, offlineCompositeStreamMap, cameraCb, mClientPackageName,
-                mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation, mClientPid, mClientUid,
-                mServicePid);
+                offlineSession, offlineCompositeStreamMap, cameraCb, mAttributionAndPermissionUtils,
+                mClientPackageName, mClientFeatureId, mCameraIdStr, mCameraFacing, mOrientation,
+                mClientPid, mClientUid, mServicePid);
         ret = sCameraService->addOfflineClient(mCameraIdStr, offlineClient);
     }
 
@@ -2097,6 +2065,7 @@
 
 void CameraDeviceClient::notifyIdle(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        std::pair<int32_t, int32_t> mostRequestedFpsRange,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
     // Thread safe. Don't bother locking.
     sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
@@ -2117,8 +2086,12 @@
         }
     }
     Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
-            fullStreamStats, mUserTag, mVideoStabilizationMode, mUsedUltraWide,
-            mUsedSettingsOverrideZoom);
+            mostRequestedFpsRange,
+            fullStreamStats,
+            mRunningSessionStats.mUserTag,
+            mRunningSessionStats.mVideoStabilizationMode,
+            mRunningSessionStats.mUsedUltraWide,
+            mRunningSessionStats.mUsedSettingsOverrideZoom);
 }
 
 void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -2236,7 +2209,7 @@
 // TODO: move to Camera2ClientBase
 bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) {
 
-    const int pid = CameraThreadState::getCallingPid();
+    const int pid = getCallingPid();
     const int selfPid = getpid();
     camera_metadata_entry_t entry;
 
@@ -2275,7 +2248,7 @@
         String16 permissionString =
             toString16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
         if (!checkCallingPermission(permissionString)) {
-            const int uid = CameraThreadState::getCallingUid();
+            const int uid = getCallingUid();
             ALOGE("Permission Denial: "
                   "can't disable transmit LED pid=%d, uid=%d", pid, uid);
             return false;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index c2f7f56..505c086 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,6 +50,7 @@
 protected:
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId,
@@ -109,11 +110,6 @@
             /*out*/
             bool* streamStatus) override;
 
-    virtual binder::Status getSessionCharacteristics(
-            const SessionConfiguration& sessionConfiguration,
-            /*out*/
-            hardware::camera2::impl::CameraMetadataNative* sessionCharacteristics) override;
-
     // Returns -EBUSY if device is not idle or in error state
     virtual binder::Status deleteStream(int streamId) override;
 
@@ -186,6 +182,7 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             bool clientPackageOverride,
             const std::optional<std::string>& clientFeatureId,
@@ -232,6 +229,7 @@
      */
 
     virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+                            std::pair<int32_t, int32_t> mostRequestedFpsRange,
                             const std::vector<hardware::CameraStreamStats>& streamStats);
     virtual void notifyError(int32_t errorCode,
                              const CaptureResultExtras& resultExtras);
@@ -367,14 +365,17 @@
     // Override the camera characteristics for performance class primary cameras.
     bool mOverrideForPerfClass;
 
-    // The string representation of object passed into CaptureRequest.setTag.
-    std::string mUserTag;
-    // The last set video stabilization mode
-    int mVideoStabilizationMode = -1;
-    // Whether a zoom_ratio < 1.0 has been used during this session
-    bool mUsedUltraWide = false;
-    // Whether a zoom settings override has been used during this session
-    bool mUsedSettingsOverrideZoom = false;
+    // Various fields used to collect session statistics
+    struct RunningSessionStats {
+        // The string representation of object passed into CaptureRequest.setTag.
+        std::string mUserTag;
+        // The last set video stabilization mode
+        int mVideoStabilizationMode = -1;
+        // Whether a zoom_ratio < 1.0 has been used during this session
+        bool mUsedUltraWide = false;
+        // Whether a zoom settings override has been used during this session
+        bool mUsedSettingsOverrideZoom = false;
+    } mRunningSessionStats;
 
     // This only exists in case of camera ID Remapping.
     const std::string mOriginalCameraId;
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index dc9e0c1..9a1fdd6 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -19,7 +19,6 @@
 //#define LOG_NDEBUG 0
 
 #include "CameraOfflineSessionClient.h"
-#include "utils/CameraThreadState.h"
 #include <utils/Trace.h>
 #include <camera/StringUtils.h>
 
@@ -163,7 +162,7 @@
         return res;
     }
     // Allow both client and the media server to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = getCallingPid();
     if (callingPid != mClientPid &&
             callingPid != mServicePid) {
         return res;
@@ -326,6 +325,7 @@
 
 void CameraOfflineSessionClient::notifyIdle(
         int64_t /*requestCount*/, int64_t /*resultErrorCount*/, bool /*deviceError*/,
+        std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
         const std::vector<hardware::CameraStreamStats>& /*streamStats*/) {
     if (mRemoteCallback.get() != nullptr) {
         mRemoteCallback->onDeviceIdle();
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 804498f..82c3d6d 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -47,6 +47,7 @@
             sp<CameraOfflineSessionBase> session,
             const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& clientPackageName,
             const std::optional<std::string>& clientFeatureId,
             const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
@@ -54,6 +55,7 @@
             CameraService::BasicClient(
                     cameraService,
                     IInterface::asBinder(remoteCallback),
+                    attributionAndPermissionUtils,
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
                     cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
@@ -110,6 +112,7 @@
     void notifyShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
     status_t notifyActive(float maxPreviewFps) override;
     void notifyIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+            std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats) override;
     void notifyAutoFocus(uint8_t newState, int triggerId) override;
     void notifyAutoExposure(uint8_t newState, int triggerId) override;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 988446b..a1b9383 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -856,8 +856,9 @@
 
     bool deviceError;
     std::map<int, StreamStats> stats;
+    std::pair<int32_t, int32_t> mostRequestedFps;
     mSessionStatsBuilder.buildAndReset(&streamStats->mRequestCount, &streamStats->mErrorCount,
-            &deviceError, &stats);
+            &deviceError, &mostRequestedFps, &stats);
     if (stats.find(mP010StreamId) != stats.end()) {
         streamStats->mWidth = mBlobWidth;
         streamStats->mHeight = mBlobHeight;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 959bd3c..2239c9f 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -37,7 +37,6 @@
 #include "device3/Camera3Device.h"
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
-#include "utils/CameraThreadState.h"
 
 namespace android {
 
@@ -50,6 +49,7 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& clientPackageName,
         bool systemNativeClient,
         const std::optional<std::string>& clientFeatureId,
@@ -63,9 +63,9 @@
         bool overrideForPerfClass,
         bool overrideToPortrait,
         bool legacyClient):
-        TClientBase(cameraService, remoteCallback, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideToPortrait),
+        TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
+                systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
+                sensorOrientation, clientPid, clientUid, servicePid, overrideToPortrait),
         mSharedCameraCallbacks(remoteCallback),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
@@ -82,7 +82,7 @@
 status_t Camera2ClientBase<TClientBase>::checkPid(const char* checkLocation)
         const {
 
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = TClientBase::getCallingPid();
     if (callingPid == TClientBase::mClientPid) return NO_ERROR;
 
     ALOGE("%s: attempt to use a locked camera from a different process"
@@ -115,12 +115,14 @@
         case IPCTransport::HIDL:
             mDevice =
                     new HidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
                             TClientBase::mOverrideToPortrait, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
+                            TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
                             TClientBase::mOverrideToPortrait, mLegacyClient);
              break;
@@ -267,7 +269,7 @@
     ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.c_str());
     binder::Status res = binder::Status::ok();
     // Allow both client and the media server to disconnect at all times
-    int callingPid = CameraThreadState::getCallingPid();
+    int callingPid = TClientBase::getCallingPid();
     if (callingPid != TClientBase::mClientPid &&
         callingPid != TClientBase::mServicePid) return res;
 
@@ -306,18 +308,18 @@
     Mutex::Autolock icl(mBinderSerializationLock);
 
     if (TClientBase::mClientPid != 0 &&
-        CameraThreadState::getCallingPid() != TClientBase::mClientPid) {
+        TClientBase::getCallingPid() != TClientBase::mClientPid) {
 
         ALOGE("%s: Camera %s: Connection attempt from pid %d; "
                 "current locked to pid %d",
                 __FUNCTION__,
                 TClientBase::mCameraIdStr.c_str(),
-                CameraThreadState::getCallingPid(),
+                TClientBase::getCallingPid(),
                 TClientBase::mClientPid);
         return BAD_VALUE;
     }
 
-    TClientBase::mClientPid = CameraThreadState::getCallingPid();
+    TClientBase::mClientPid = TClientBase::getCallingPid();
 
     TClientBase::mRemoteCallback = client;
     mSharedCameraCallbacks = client;
@@ -378,6 +380,7 @@
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+        std::pair<int32_t, int32_t> mostRequestedFpsRange,
         const std::vector<hardware::CameraStreamStats>& streamStats,
         const std::string& userTag, int videoStabilizationMode, bool usedUltraWide,
         bool usedZoomOverride) {
@@ -389,7 +392,7 @@
         }
         mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
-                usedUltraWide, usedZoomOverride, streamStats);
+                usedUltraWide, usedZoomOverride, mostRequestedFpsRange, streamStats);
     }
     mDeviceActive = false;
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 2bb90d9..c24f92b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -21,6 +21,7 @@
 #include "camera/CameraMetadata.h"
 #include "camera/CaptureResult.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/AttributionAndPermissionUtils.h"
 #include "CameraServiceWatchdog.h"
 
 namespace android {
@@ -51,6 +52,7 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
+                      std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
                       const std::string& clientPackageName,
                       bool systemNativeClient,
                       const std::optional<std::string>& clientFeatureId,
@@ -84,6 +86,7 @@
     virtual status_t      notifyActive(float maxPreviewFps);
     virtual void          notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
                                      bool /*deviceError*/,
+                                     std::pair<int32_t, int32_t> /*mostRequestedFpsRange*/,
                                      const std::vector<hardware::CameraStreamStats>&) {}
     virtual void          notifyShutter(const CaptureResultExtras& resultExtras,
                                         nsecs_t timestamp);
@@ -97,6 +100,7 @@
 
     void                  notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
                                      bool deviceError,
+                                     std::pair<int32_t, int32_t> mostRequestedFpsRange,
                                      const std::vector<hardware::CameraStreamStats>& streamStats,
                                      const std::string& userTag, int videoStabilizationMode,
                                      bool usedUltraWide, bool usedZoomOverride);
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 976c47c..b1ba761 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -46,6 +46,7 @@
     // May return an error since it checks appops
     virtual status_t notifyActive(float maxPreviewFps) = 0;
     virtual void notifyIdle(int64_t requestCount, int64_t resultError, bool deviceError,
+            std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats) = 0;
 
     // Required only for API2
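
For context on the signature change above: the new mostRequestedFpsRange argument threaded through notifyIdle() is a {min, max} pair summarizing which AE target FPS range was requested most often during the session; the counting side appears later in this patch as SessionStatsBuilder::incFpsRequestedCount() in Camera3Device.cpp. A minimal, self-contained sketch of that bookkeeping, using a hypothetical FpsRangeTracker helper that is not part of the patch:

#include <cstdint>
#include <map>
#include <utility>

// Hypothetical helper, not part of the patch: counts how often each AE target
// FPS range is requested and reports the most frequent one, i.e. the kind of
// value the new mostRequestedFpsRange parameter carries.
class FpsRangeTracker {
  public:
    void onRequest(int32_t minFps, int32_t maxFps) {
        mCounts[{minFps, maxFps}]++;
    }

    // Returns {0, 0} when no request carried an FPS range.
    std::pair<int32_t, int32_t> mostRequested() const {
        std::pair<int32_t, int32_t> best{0, 0};
        int64_t bestCount = 0;
        for (const auto& [range, count] : mCounts) {
            if (count > bestCount) {
                best = range;
                bestCount = count;
            }
        }
        return best;
    }

  private:
    std::map<std::pair<int32_t, int32_t>, int64_t> mCounts;
};
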
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 1ba3de4..45c3a1f 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -33,6 +33,7 @@
 #include <future>
 #include <inttypes.h>
 #include <android_companion_virtualdevice_flags.h>
+#include <android_companion_virtualdevice_build_flags.h>
 #include <android/binder_manager.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
@@ -139,7 +140,7 @@
 }
 
 std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-CameraProviderManager::AidlServiceInteractionProxyImpl::getAidlService(
+CameraProviderManager::AidlServiceInteractionProxyImpl::getService(
         const std::string& serviceName) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
@@ -147,19 +148,35 @@
     if (flags::lazy_aidl_wait_for_service()) {
         binder = AServiceManager_waitForService(serviceName.c_str());
     } else {
-        binder = AServiceManager_getService(serviceName.c_str());
+        binder = AServiceManager_checkService(serviceName.c_str());
     }
 
     if (binder == nullptr) {
-        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
-              serviceName.c_str());
+        ALOGE("%s: AIDL Camera provider HAL '%s' is not actually available, despite waiting "
+              "indefinitely?", __FUNCTION__, serviceName.c_str());
         return nullptr;
     }
     std::shared_ptr<ICameraProvider> interface =
             ICameraProvider::fromBinder(ndk::SpAIBinder(binder));
 
     return interface;
-};
+}
+
+std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+CameraProviderManager::AidlServiceInteractionProxyImpl::tryGetService(
+        const std::string& serviceName) {
+    using aidl::android::hardware::camera::provider::ICameraProvider;
+
+    std::shared_ptr<ICameraProvider> interface = ICameraProvider::fromBinder(
+                    ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+    if (interface == nullptr) {
+        ALOGD("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
+              serviceName.c_str());
+        return nullptr;
+    }
+
+    return interface;
+}
 
 static std::string getFullAidlProviderName(const std::string instance) {
     std::string aidlHalServiceDescriptor =
@@ -404,8 +421,15 @@
         return NAME_NOT_FOUND;
     }
 
+    metadataGetter getMetadata = [this](const std::string &id,
+            bool overrideForPerfClass) {
+        CameraMetadata metadata;
+        this->getCameraCharacteristicsLocked(id, overrideForPerfClass,
+                                             &metadata, /*overrideToPortrait*/false);
+        return metadata;
+    };
     return deviceInfo->isSessionConfigurationSupported(configuration,
-            overrideForPerfClass, checkSessionParams, status);
+            overrideForPerfClass, getMetadata, checkSessionParams, status);
 }
 
 status_t  CameraProviderManager::createDefaultRequest(const std::string& cameraId,
@@ -422,9 +446,8 @@
         return NAME_NOT_FOUND;
     }
 
-    camera_metadata_t *rawRequest;
     status_t res = deviceInfo->createDefaultRequest(templateId,
-            &rawRequest);
+            metadata);
 
     if (res == BAD_VALUE) {
         ALOGI("%s: template %d is not supported on this camera device",
@@ -436,25 +459,34 @@
         return res;
     }
 
-    set_camera_metadata_vendor_id(rawRequest, deviceInfo->mProviderTagid);
-    metadata->acquire(rawRequest);
-
     return OK;
 }
 
-status_t CameraProviderManager::getSessionCharacteristics(const std::string& id,
-        const SessionConfiguration &configuration, bool overrideForPerfClass,
-        metadataGetter getMetadata,
-        CameraMetadata* sessionCharacteristics /*out*/) const {
+status_t CameraProviderManager::getSessionCharacteristics(
+        const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
+        bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
     if (!flags::feature_combination_query()) {
         return INVALID_OPERATION;
     }
+
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) {
         return NAME_NOT_FOUND;
     }
 
+    metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+                                                            bool overrideForPerfClass) {
+        CameraMetadata metadata;
+        status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
+                                                            overrideToPortrait);
+        if (ret != OK) {
+            ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
+                  id.c_str());
+        }
+        return metadata;
+    };
+
     return deviceInfo->getSessionCharacteristics(configuration,
             overrideForPerfClass, getMetadata, sessionCharacteristics);
 }
@@ -1062,20 +1094,6 @@
     }
 }
 
-CameraMetadata CameraProviderManager::ProviderInfo::DeviceInfo3::deviceInfo(
-        const std::string &id) {
-    if (id.empty()) {
-        return mCameraCharacteristics;
-    } else {
-        if (mPhysicalCameraCharacteristics.find(id) != mPhysicalCameraCharacteristics.end()) {
-            return mPhysicalCameraCharacteristics.at(id);
-        } else {
-            ALOGE("%s: Invalid physical camera id %s", __FUNCTION__, id.c_str());
-            return mCameraCharacteristics;
-        }
-    }
-}
-
 SystemCameraKind CameraProviderManager::ProviderInfo::DeviceInfo3::getSystemCameraKind() {
     camera_metadata_entry_t entryCap;
     entryCap = mCameraCharacteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
@@ -1807,18 +1825,13 @@
     auto& c = mCameraCharacteristics;
 
     auto entry = c.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
-    if (entry.count != 0) {
-        ALOGE("%s: CameraCharacteristics must not contain ANDROID_SENSOR_READOUT_TIMESTAMP!",
-                __FUNCTION__);
+    if (entry.count == 0) {
+        uint8_t defaultReadoutTimestamp = readoutTimestampSupported ?
+                                          ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE :
+                                          ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
+        res = c.update(ANDROID_SENSOR_READOUT_TIMESTAMP, &defaultReadoutTimestamp, 1);
     }
 
-    uint8_t readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED;
-    if (readoutTimestampSupported) {
-        readoutTimestamp = ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
-    }
-
-    res = c.update(ANDROID_SENSOR_READOUT_TIMESTAMP, &readoutTimestamp, 1);
-
     return res;
 }
 
@@ -1838,7 +1851,7 @@
 
     auto& c = mCameraCharacteristics;
     status_t res = c.update(ANDROID_INFO_SESSION_CONFIGURATION_QUERY_VERSION, &versionCode, 1);
-
+    mSessionConfigQueryVersion = versionCode;
     return res;
 }
 
@@ -2101,8 +2114,14 @@
         const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
-    std::shared_ptr<ICameraProvider> interface =
-            mAidlServiceProxy->getAidlService(providerName.c_str());
+    std::shared_ptr<ICameraProvider> interface;
+    if (flags::delay_lazy_hal_instantiation()) {
+        // Only get remote instance if already running. Lazy Providers will be
+        // woken up later.
+        interface = mAidlServiceProxy->tryGetService(providerName);
+    } else {
+        interface = mAidlServiceProxy->getService(providerName);
+    }
 
     if (interface == nullptr) {
         ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -3249,7 +3268,8 @@
 }
 
 bool CameraProviderManager::isVirtualCameraHalEnabled() {
-    return vd_flags::virtual_camera_service_discovery();
+    return vd_flags::virtual_camera_service_discovery() &&
+           vd_flags::virtual_camera_service_build_flag();
 }
 
 } // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 53a2102..248227d 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -178,9 +178,15 @@
     // Proxy to inject fake services in test.
     class AidlServiceInteractionProxy {
       public:
-        // Returns the Aidl service with the given serviceName
+        // Returns the Aidl service with the given serviceName. Will wait indefinitely
+        // for the service to come up if not running.
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) = 0;
+        getService(const std::string& serviceName) = 0;
+
+        // Attempts to get an already running AIDL service of the given serviceName.
+        // Returns nullptr immediately if service is not running.
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) = 0;
 
         virtual ~AidlServiceInteractionProxy() = default;
     };
@@ -190,7 +196,10 @@
     class AidlServiceInteractionProxyImpl : public AidlServiceInteractionProxy {
       public:
         virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-        getAidlService(const std::string& serviceName) override;
+        getService(const std::string& serviceName) override;
+
+        virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+        tryGetService(const std::string& serviceName) override;
     };
 
     /**
@@ -321,7 +330,8 @@
      */
      status_t getSessionCharacteristics(const std::string& id,
             const SessionConfiguration &configuration,
-            bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+            bool overrideForPerfClass,
+            bool overrideToPortrait,
             CameraMetadata* sessionCharacteristics /*out*/) const;
 
     /**
@@ -636,6 +646,7 @@
             virtual status_t isSessionConfigurationSupported(
                     const SessionConfiguration &/*configuration*/,
                     bool /*overrideForPerfClass*/,
+                    camera3::metadataGetter /*getMetadata*/,
                     bool /*checkSessionParams*/,
                     bool * /*status*/) {
                 return INVALID_OPERATION;
@@ -644,8 +655,7 @@
             virtual status_t getSessionCharacteristics(
                     const SessionConfiguration &/*configuration*/,
                     bool /*overrideForPerfClass*/,
-                    camera3::metadataGetter /*getMetadata*/,
-                    CameraMetadata* /*sessionCharacteristics*/) {
+                    camera3::metadataGetter /*getMetadata*/, CameraMetadata* /*outChars*/) {
                 return INVALID_OPERATION;
             }
 
@@ -653,7 +663,7 @@
             virtual void notifyDeviceStateChange(int64_t /*newState*/) {}
             virtual status_t createDefaultRequest(
                     camera3::camera_request_template_t /*templateId*/,
-                    camera_metadata_t** /*metadata*/) {
+                    CameraMetadata* /*metadata*/) {
                 return INVALID_OPERATION;
             }
 
@@ -727,6 +737,10 @@
             // Only contains characteristics for hidden physical cameras,
             // not for public physical cameras.
             std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
+            // Value filled in from addSessionConfigQueryVersionTag.
+            // Cached to make lookups faster
+            int mSessionConfigQueryVersion = 0;
+
             void queryPhysicalCameraIds();
             SystemCameraKind getSystemCameraKind();
             status_t fixupMonochromeTags();
@@ -764,8 +778,6 @@
                     std::vector<int64_t>* stallDurations,
                     const camera_metadata_entry& halStreamConfigs,
                     const camera_metadata_entry& halStreamDurations);
-
-            CameraMetadata deviceInfo(const std::string &id);
         };
     protected:
         std::string mType;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index d773af3..41e0cd1 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -17,6 +17,8 @@
 #include "common/HalConversionsTemplated.h"
 #include "common/CameraProviderInfoTemplated.h"
 
+#include <aidl/AidlUtils.h>
+
 #include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 
@@ -26,6 +28,7 @@
 #include <android/hardware/ICameraService.h>
 #include <camera_metadata_hidden.h>
 
+#include "device3/DistortionMapper.h"
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtils.h>
 #include <utils/Trace.h>
@@ -42,8 +45,10 @@
 
 using namespace aidl::android::hardware;
 using namespace hardware::camera;
+using android::hardware::cameraservice::utils::conversion::aidl::copySessionCharacteristics;
 using hardware::camera2::utils::CameraIdAndSessionConfiguration;
 using hardware::ICameraService;
+using SessionConfigurationUtils::overrideDefaultRequestKeys;
 
 using HalDeviceStatusType = aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ICameraProvider = aidl::android::hardware::camera::provider::ICameraProvider;
@@ -275,54 +280,58 @@
     if (mSavedInterface != nullptr) {
         return mSavedInterface;
     }
+
     if (!kEnableLazyHal) {
         ALOGE("Bad provider state! Should not be here on a non-lazy HAL!");
         return nullptr;
     }
 
     auto interface = mActiveInterface.lock();
-    if (interface == nullptr) {
-        // Try to get service without starting
-        interface =
-                    ICameraProvider::fromBinder(
-                            ndk::SpAIBinder(AServiceManager_checkService(mProviderName.c_str())));
-        if (interface == nullptr) {
-            ALOGV("Camera provider actually needs restart, calling getService(%s)",
-                  mProviderName.c_str());
-            interface = mManager->mAidlServiceProxy->getAidlService(mProviderName.c_str());
-
-            if (interface == nullptr) {
-                ALOGD("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
-                return nullptr;
-            }
-
-            // Set all devices as ENUMERATING, provider should update status
-            // to PRESENT after initializing.
-            // This avoids failing getCameraDeviceInterface_V3_x before devices
-            // are ready.
-            for (auto& device : mDevices) {
-              device->mIsDeviceAvailable = false;
-            }
-
-            interface->setCallback(mCallbacks);
-            auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
-                    this);
-            if (link != STATUS_OK) {
-                ALOGW("%s: Unable to link to provider '%s' death notifications",
-                        __FUNCTION__, mProviderName.c_str());
-                mManager->removeProvider(mProviderInstance);
-                return nullptr;
-            }
-
-            // Send current device state
-            interface->notifyDeviceStateChange(mDeviceState);
-        }
-        mActiveInterface = interface;
-    } else {
-        ALOGV("Camera provider (%s) already in use. Re-using instance.",
-              mProviderName.c_str());
+    if (interface != nullptr) {
+        ALOGV("Camera provider (%s) already in use. Re-using instance.", mProviderName.c_str());
+        return interface;
     }
 
+    // Try to get service without starting
+    interface = ICameraProvider::fromBinder(
+            ndk::SpAIBinder(AServiceManager_checkService(mProviderName.c_str())));
+    if (interface != nullptr) {
+        // Service is already running. Cache and return.
+        mActiveInterface = interface;
+        return interface;
+    }
+
+    ALOGV("Camera provider actually needs restart, calling getService(%s)", mProviderName.c_str());
+    interface = mManager->mAidlServiceProxy->getService(mProviderName);
+
+    if (interface == nullptr) {
+        ALOGE("%s: %s service not started", __FUNCTION__, mProviderName.c_str());
+        return nullptr;
+    }
+
+    // Set all devices as ENUMERATING, provider should update status
+    // to PRESENT after initializing.
+    // This avoids failing getCameraDeviceInterface_V3_x before devices
+    // are ready.
+    for (auto& device : mDevices) {
+      device->mIsDeviceAvailable = false;
+    }
+
+    interface->setCallback(mCallbacks);
+    auto link = AIBinder_linkToDeath(interface->asBinder().get(), mDeathRecipient.get(),
+            this);
+    if (link != STATUS_OK) {
+        ALOGW("%s: Unable to link to provider '%s' death notifications",
+                __FUNCTION__, mProviderName.c_str());
+        mManager->removeProvider(mProviderInstance);
+        return nullptr;
+    }
+
+    // Send current device state
+    interface->notifyDeviceStateChange(mDeviceState);
+    // Cache interface to return early for future calls.
+    mActiveInterface = interface;
+
     return interface;
 }
 
@@ -686,6 +695,14 @@
         }
     }
 
+    int deviceVersion = HARDWARE_DEVICE_API_VERSION(mVersion.get_major(), mVersion.get_minor());
+    if (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_3) {
+        // This additional set of request keys must match the ones specified
+        // in ICameraDevice.isSessionConfigurationWithSettingsSupported.
+        mAdditionalKeysForFeatureQuery.insert(mAdditionalKeysForFeatureQuery.end(),
+                {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, ANDROID_CONTROL_AE_TARGET_FPS_RANGE});
+    }
+
     if (!kEnableLazyHal) {
         // Save HAL reference indefinitely
         mSavedInterface = interface;
@@ -783,7 +800,7 @@
 
 status_t AidlProviderInfo::AidlDeviceInfo3::isSessionConfigurationSupported(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        bool checkSessionParams, bool *status) {
+        camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
 
     auto operatingMode = configuration.getOperatingMode();
 
@@ -795,12 +812,10 @@
 
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
             mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            checkSessionParams, &earlyExit);
+            checkSessionParams, mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!bRes.isOk()) {
         return UNKNOWN_ERROR;
@@ -847,7 +862,7 @@
 }
 
 status_t AidlProviderInfo::AidlDeviceInfo3::createDefaultRequest(
-        camera3::camera_request_template_t templateId, camera_metadata_t** metadata) {
+        camera3::camera_request_template_t templateId, CameraMetadata* metadata) {
     const std::shared_ptr<camera::device::ICameraDevice> interface =
             startDeviceInterface();
 
@@ -883,11 +898,12 @@
     }
     const camera_metadata *r =
             reinterpret_cast<const camera_metadata_t*>(request.metadata.data());
+    camera_metadata *rawRequest  = nullptr;
     size_t expectedSize = request.metadata.size();
     int ret = validate_camera_metadata_structure(r, &expectedSize);
     if (ret == OK || ret == CAMERA_METADATA_VALIDATION_SHIFTED) {
-        *metadata = clone_camera_metadata(r);
-        if (*metadata == nullptr) {
+        rawRequest = clone_camera_metadata(r);
+        if (rawRequest == nullptr) {
             ALOGE("%s: Unable to clone camera metadata received from HAL",
                     __FUNCTION__);
             res = UNKNOWN_ERROR;
@@ -897,18 +913,28 @@
         res = UNKNOWN_ERROR;
     }
 
+    set_camera_metadata_vendor_id(rawRequest, mProviderTagid);
+    metadata->acquire(rawRequest);
+
+    res = overrideDefaultRequestKeys(metadata);
+    if (res != OK) {
+        ALOGE("Unabled to override default request keys: %s (%d)",
+                strerror(-res), res);
+        return res;
+    }
+
     return res;
 }
 
 status_t AidlProviderInfo::AidlDeviceInfo3::getSessionCharacteristics(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        camera3::metadataGetter getMetadata, CameraMetadata *sessionCharacteristics) {
+        camera3::metadataGetter getMetadata, CameraMetadata* outChars) {
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     auto res = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
             mPhysicalIds, streamConfiguration, overrideForPerfClass, mProviderTagid,
-            /*checkSessionParams*/true, &earlyExit);
+            /*checkSessionParams*/true, mAdditionalKeysForFeatureQuery, &earlyExit);
 
     if (!res.isOk()) {
         return UNKNOWN_ERROR;
@@ -928,24 +954,32 @@
     aidl::android::hardware::camera::device::CameraMetadata chars;
     ::ndk::ScopedAStatus ret =
         interface->getSessionCharacteristics(streamConfiguration, &chars);
-    std::vector<uint8_t> &metadata = chars.metadata;
+    if (!ret.isOk()) {
+        ALOGE("%s: Unexpected binder error while getting session characteristics (%d): %s",
+              __FUNCTION__, ret.getExceptionCode(), ret.getMessage());
+        return mapToStatusT(ret);
+    }
 
-    camera_metadata_t *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
+    std::vector<uint8_t> &metadata = chars.metadata;
+    auto *buffer = reinterpret_cast<camera_metadata_t*>(metadata.data());
     size_t expectedSize = metadata.size();
     int resV = validate_camera_metadata_structure(buffer, &expectedSize);
     if (resV == OK || resV == CAMERA_METADATA_VALIDATION_SHIFTED) {
         set_camera_metadata_vendor_id(buffer, mProviderTagid);
-        *sessionCharacteristics = buffer;
     } else {
         ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    if (!ret.isOk()) {
-        ALOGE("%s: Unexpected binder error: %s", __FUNCTION__, ret.getMessage());
-        return mapToStatusT(ret);
-    }
-    return OK;
+    CameraMetadata rawSessionChars;
+    rawSessionChars = buffer;  //  clone buffer
+    rawSessionChars.sort();    // sort for faster lookups
+
+    *outChars = mCameraCharacteristics;
+    outChars->sort();  // sort for faster reads and (hopefully!) writes
+
+    return copySessionCharacteristics(/*from=*/rawSessionChars, /*to=*/outChars,
+                                      mSessionConfigQueryVersion);
 }
 
 status_t AidlProviderInfo::convertToAidlHALStreamCombinationAndCameraIdsLocked(
@@ -988,7 +1022,8 @@
                     mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, mProviderTagid,
-                    /*checkSessionParams*/false, &shouldExit);
+                    /*checkSessionParams*/false, /*additionalKeys*/{},
+                    &shouldExit);
         if (!bStatus.isOk()) {
             ALOGE("%s: convertToHALStreamCombination failed", __FUNCTION__);
             return INVALID_OPERATION;
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
index 0bfa7d4..1983cc3 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.h
@@ -127,20 +127,21 @@
 
         virtual status_t isSessionConfigurationSupported(
                 const SessionConfiguration &/*configuration*/,
-                bool overrideForPerfClass, bool checkSessionParams,
-                bool *status/*status*/);
+                bool overrideForPerfClass, camera3::metadataGetter getMetadata,
+                bool checkSessionParams, bool *status/*status*/);
 
         virtual status_t createDefaultRequest(
                     camera3::camera_request_template_t templateId,
-                    camera_metadata_t** metadata) override;
+                    CameraMetadata* metadata) override;
 
         virtual status_t getSessionCharacteristics(
                 const SessionConfiguration &/*configuration*/,
                 bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
-                CameraMetadata *sessionCharacteristics /*sessionCharacteristics*/);
+                CameraMetadata */*outChars*/);
 
         std::shared_ptr<aidl::android::hardware::camera::device::ICameraDevice>
                 startDeviceInterface();
+        std::vector<int32_t> mAdditionalKeysForFeatureQuery;
     };
 
  private:
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index d2643c1..6eaf41f 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -692,6 +692,14 @@
         mHasFlashUnit = false;
     }
 
+    if (flags::feature_combination_query()) {
+        res = addSessionConfigQueryVersionTag();
+        if (OK != res) {
+            ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+        }
+    }
+
     camera_metadata_entry entry =
             mCameraCharacteristics.find(ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL);
     if (entry.count == 1) {
@@ -853,7 +861,7 @@
 
 status_t HidlProviderInfo::HidlDeviceInfo3::isSessionConfigurationSupported(
         const SessionConfiguration &configuration, bool overrideForPerfClass,
-        bool checkSessionParams, bool *status) {
+        camera3::metadataGetter getMetadata, bool checkSessionParams, bool *status) {
 
     if (checkSessionParams) {
         // HIDL device doesn't support checking session parameters
@@ -862,8 +870,6 @@
 
     hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
     bool earlyExit = false;
-    camera3::metadataGetter getMetadata = [this](const std::string &id,
-            bool /*overrideForPerfClass*/) {return this->deviceInfo(id);};
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
             mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
             configuration_3_7, overrideForPerfClass, mProviderTagid,
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
index 869bba0..2838f03 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.h
@@ -105,8 +105,8 @@
 
         virtual status_t isSessionConfigurationSupported(
                 const SessionConfiguration &/*configuration*/,
-                bool overrideForPerfClass, bool checkSessionParams,
-                bool *status/*status*/);
+                bool overrideForPerfClass, camera3::metadataGetter /*getMetadata*/,
+                bool checkSessionParams, bool *status/*status*/);
 
         sp<hardware::camera::device::V3_2::ICameraDevice> startDeviceInterface();
     };
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 2718604..f4d8f7f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -45,6 +45,7 @@
 #include <utility>
 
 #include <android-base/stringprintf.h>
+#include <sched.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <utils/Timers.h>
@@ -64,11 +65,11 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
 #include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include <algorithm>
 #include <optional>
@@ -77,13 +78,16 @@
 using namespace android::camera3;
 using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+using namespace android::hardware::cameraservice::utils::conversion::aidl;
 
 namespace flags = com::android::internal::camera::flags;
 namespace android {
-namespace flags = com::android::internal::camera::flags;
+
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient):
+        AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
@@ -145,17 +149,6 @@
     /** Register in-flight map to the status tracker */
     mInFlightStatusId = mStatusTracker->addComponent("InflightRequests");
 
-    if (mUseHalBufManager) {
-        res = mRequestBufferSM.initialize(mStatusTracker);
-        if (res != OK) {
-            SET_ERR_L("Unable to start request buffer state machine: %s (%d)",
-                    strerror(-res), res);
-            mInterface->close();
-            mStatusTracker.clear();
-            return res;
-        }
-    }
-
     /** Create buffer manager */
     mBufferManager = new Camera3BufferManager();
 
@@ -266,6 +259,8 @@
         return res;
     }
 
+    mSupportsExtensionKeys = areExtensionKeysSupported(mDeviceInfo);
+
     return OK;
 }
 
@@ -1371,7 +1366,8 @@
 status_t Camera3Device::filterParamsAndConfigureLocked(const CameraMetadata& params,
         int operatingMode) {
     CameraMetadata filteredParams;
-    SessionConfigurationUtils::filterParameters(params, mDeviceInfo, mVendorTagId, filteredParams);
+    SessionConfigurationUtils::filterParameters(params, mDeviceInfo,
+            /*additionalKeys*/{}, mVendorTagId, filteredParams);
 
     camera_metadata_entry_t availableSessionKeys = mDeviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1441,7 +1437,7 @@
 
     if (templateId <= 0 || templateId >= CAMERA_TEMPLATE_COUNT) {
         android_errorWriteWithInfoLog(CameraService::SN_EVENT_LOG_ID, "26866110",
-                CameraThreadState::getCallingUid(), nullptr, 0);
+                getCallingUid(), nullptr, 0);
         return BAD_VALUE;
     }
 
@@ -1492,29 +1488,13 @@
         set_camera_metadata_vendor_id(rawRequest, mVendorTagId);
         mRequestTemplateCache[templateId].acquire(rawRequest);
 
-        // Override the template request with zoomRatioMapper
-        res = mZoomRatioMappers[mId].initZoomRatioInTemplate(
-                &mRequestTemplateCache[templateId]);
+        res = overrideDefaultRequestKeys(&mRequestTemplateCache[templateId]);
         if (res != OK) {
-            CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
+            CLOGE("Failed to overrideDefaultRequestKeys for template %d: %s (%d)",
                     templateId, strerror(-res), res);
             return res;
         }
 
-        // Fill in JPEG_QUALITY if not available
-        if (!mRequestTemplateCache[templateId].exists(ANDROID_JPEG_QUALITY)) {
-            static const uint8_t kDefaultJpegQuality = 95;
-            mRequestTemplateCache[templateId].update(ANDROID_JPEG_QUALITY,
-                    &kDefaultJpegQuality, 1);
-        }
-
-        // Fill in AUTOFRAMING if not available
-        if (!mRequestTemplateCache[templateId].exists(ANDROID_CONTROL_AUTOFRAMING)) {
-            static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
-            mRequestTemplateCache[templateId].update(ANDROID_CONTROL_AUTOFRAMING,
-                    &kDefaultAutoframingMode, 1);
-        }
-
         *request = mRequestTemplateCache[templateId];
         mLastTemplateId = templateId;
     }
@@ -1624,7 +1604,9 @@
     mStatusWaiters++;
 
     bool signalPipelineDrain = false;
-    if (!active && mUseHalBufManager) {
+    if (!active &&
+            (mUseHalBufManager ||
+                    (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() != 0))) {
         auto streamIds = mOutputStreams.getStreamIds();
         if (mStatus == STATUS_ACTIVE) {
             mRequestThread->signalPipelineDrain(streamIds);
@@ -1983,9 +1965,10 @@
             // Get session stats from the builder, and notify the listener.
             int64_t requestCount, resultErrorCount;
             bool deviceError;
+            std::pair<int32_t, int32_t> mostRequestedFpsRange;
             std::map<int, StreamStats> streamStatsMap;
             mSessionStatsBuilder.buildAndReset(&requestCount, &resultErrorCount,
-                    &deviceError, &streamStatsMap);
+                    &deviceError, &mostRequestedFpsRange, &streamStatsMap);
             for (size_t i = 0; i < streamIds.size(); i++) {
                 int streamId = streamIds[i];
                 auto stats = streamStatsMap.find(streamId);
@@ -2003,7 +1986,8 @@
                            stats->second.mCaptureLatencyHistogram.end());
                 }
             }
-            listener->notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+            listener->notifyIdle(requestCount, resultErrorCount, deviceError,
+                mostRequestedFpsRange, streamStats);
         } else {
             res = listener->notifyActive(sessionMaxPreviewFps);
         }
@@ -2394,6 +2378,42 @@
     return ret;
 }
 
+Camera3Device::RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid) : mTid(tid),
+        mPreviousPolicy(sched_getscheduler(tid)) {
+    if (flags::surface_ipc()) {
+        auto res = sched_getparam(mTid, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't retrieve thread scheduler parameters: %s (%d)",
+                    strerror(-res), res);
+            return;
+        }
+
+        struct sched_param param = {0};
+        param.sched_priority = kRequestThreadPriority;
+
+        res = sched_setscheduler(mTid, SCHED_FIFO, &param);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for thread: %s (%d)",
+                    strerror(-res), res);
+        } else {
+            ALOGD("Set real time priority for thread (tid %d)", mTid);
+            mPolicyBumped = true;
+        }
+    }
+}
+
+Camera3Device::RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
+    if (mPolicyBumped && flags::surface_ipc()) {
+        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't set regular priority for thread: %s (%d)",
+                    strerror(-res), res);
+        } else {
+            ALOGD("Set regular priority for thread (tid %d)", mTid);
+        }
+    }
+}
+
 status_t Camera3Device::configureStreamsLocked(int operatingMode,
         const CameraMetadata& sessionParams, bool notifyRequestThread) {
     ATRACE_CALL();
@@ -2540,11 +2560,14 @@
     }
 
     config.streams = streams.editArray();
+    config.hal_buffer_managed_streams = mHalBufManagedStreamIds;
     config.use_hal_buf_manager = mUseHalBufManager;
 
     // Do the HAL configuration; will potentially touch stream
-    // max_buffers, usage, and priv fields, as well as data_space and format
-    // fields for IMPLEMENTATION_DEFINED formats.
+    // max_buffers, usage, priv fields, data_space and format
+    // fields for IMPLEMENTATION_DEFINED formats as well as hal buffer managed
+    // streams and use_hal_buf_manager (in case aconfig flag session_hal_buf_manager
+    // is not enabled but the HAL supports session specific hal buffer manager).
 
     int64_t logId = mCameraServiceProxyWrapper->getCurrentLogIdForCamera(mId);
     const camera_metadata_t *sessionBuffer = sessionParams.getAndLock();
@@ -2564,13 +2587,19 @@
                 strerror(-res), res);
         return res;
     }
+    // It is possible that use hal buffer manager behavior was changed by the
+    // configureStreams call.
+    mUseHalBufManager = config.use_hal_buf_manager;
     if (flags::session_hal_buf_manager()) {
-        bool prevSessionHalBufManager = mUseHalBufManager;
-        // It is possible that configureStreams() changed config.use_hal_buf_manager
-        mUseHalBufManager = config.use_hal_buf_manager;
-        if (prevSessionHalBufManager && !mUseHalBufManager) {
+        bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
+        // It is possible that configureStreams() changed config.hal_buffer_managed_streams
+        mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
+
+        bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
+
+        if (prevSessionHalBufManager && !thisSessionHalBufManager) {
             mRequestBufferSM.deInit();
-        } else if (!prevSessionHalBufManager && mUseHalBufManager) {
+        } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
             res = mRequestBufferSM.initialize(mStatusTracker);
             if (res != OK) {
                 SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
@@ -2578,45 +2607,53 @@
                 return res;
             }
         }
-        mRequestThread->setHalBufferManager(mUseHalBufManager);
-    }
-    // Finish all stream configuration immediately.
-    // TODO: Try to relax this later back to lazy completion, which should be
-    // faster
-
-    if (mInputStream != NULL && mInputStream->isConfiguring()) {
-        bool streamReConfigured = false;
-        res = mInputStream->finishConfiguration(&streamReConfigured);
-        if (res != OK) {
-            CLOGE("Can't finish configuring input stream %d: %s (%d)",
-                    mInputStream->getId(), strerror(-res), res);
-            cancelStreamsConfigurationLocked();
-            if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
-                return DEAD_OBJECT;
-            }
-            return BAD_VALUE;
-        }
-        if (streamReConfigured) {
-            mInterface->onStreamReConfigured(mInputStream->getId());
-        }
+        mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
     }
 
-    for (size_t i = 0; i < mOutputStreams.size(); i++) {
-        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
-        if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
+    {
+        // Stream/surface setup can include a lot of binder IPC. Raise the
+        // thread priority when running the binder IPC heavy configuration
+        // sequence.
+        RunThreadWithRealtimePriority priorityBump;
+
+        // Finish all stream configuration immediately.
+        // TODO: Try to relax this later back to lazy completion, which should be
+        // faster
+
+        if (mInputStream != NULL && mInputStream->isConfiguring()) {
             bool streamReConfigured = false;
-            res = outputStream->finishConfiguration(&streamReConfigured);
+            res = mInputStream->finishConfiguration(&streamReConfigured);
             if (res != OK) {
-                CLOGE("Can't finish configuring output stream %d: %s (%d)",
-                        outputStream->getId(), strerror(-res), res);
+                CLOGE("Can't finish configuring input stream %d: %s (%d)",
+                        mInputStream->getId(), strerror(-res), res);
                 cancelStreamsConfigurationLocked();
-                if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
+                if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
                     return DEAD_OBJECT;
                 }
                 return BAD_VALUE;
             }
             if (streamReConfigured) {
-                mInterface->onStreamReConfigured(outputStream->getId());
+                mInterface->onStreamReConfigured(mInputStream->getId());
+            }
+        }
+
+        for (size_t i = 0; i < mOutputStreams.size(); i++) {
+            sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
+            if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
+                bool streamReConfigured = false;
+                res = outputStream->finishConfiguration(&streamReConfigured);
+                if (res != OK) {
+                    CLOGE("Can't finish configuring output stream %d: %s (%d)",
+                            outputStream->getId(), strerror(-res), res);
+                    cancelStreamsConfigurationLocked();
+                    if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
+                        return DEAD_OBJECT;
+                    }
+                    return BAD_VALUE;
+                }
+                if (streamReConfigured) {
+                    mInterface->onStreamReConfigured(outputStream->getId());
+                }
             }
         }
     }
@@ -2906,7 +2943,8 @@
 
     FlushInflightReqStates states {
         mId, mInFlightLock, mInFlightMap, mUseHalBufManager,
-        listener, *this, *mInterface, *this, mSessionStatsBuilder};
+        mHalBufManagedStreamIds, listener, *this, *mInterface, *this,
+        mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 }
@@ -2933,6 +2971,16 @@
             physicalMetadata, outputBuffers, numOutputBuffers, inputStreamId);
 }
 
+void Camera3Device::collectRequestStats(int64_t frameNumber, const CameraMetadata &request) {
+    if (flags::analytics_24q3()) {
+        auto entry = request.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+        if (entry.count >= 2) {
+            mSessionStatsBuilder.incFpsRequestedCount(
+                entry.data.i32[0], entry.data.i32[1], frameNumber);
+        }
+    }
+}
+
 void Camera3Device::cleanupNativeHandles(
         std::vector<native_handle_t*> *handles, bool closeFd) {
     if (handles == nullptr) {
@@ -2971,6 +3019,11 @@
     return mBufferRecords.verifyBufferIds(streamId, bufIds);
 }
 
+bool Camera3Device::HalInterface::isHalBufferManagedStream(int32_t streamId) const {
+    return (mUseHalBufManager || (flags::session_hal_buf_manager() &&
+                                  contains(mHalBufManagedStreamIds, streamId)));
+}
+
 status_t Camera3Device::HalInterface::popInflightBuffer(
         int32_t frameNumber, int32_t streamId,
         /*out*/ buffer_handle_t **buffer) {
@@ -3063,7 +3116,7 @@
         mOverrideToPortrait(overrideToPortrait),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
-    mVndkVersion = property_get_int32("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 Camera3Device::RequestThread::~RequestThread() {}
@@ -3289,8 +3342,9 @@
     mDoPauseSignal.signal();
 }
 
-void Camera3Device::RequestThread::setHalBufferManager(bool enabled) {
-    mUseHalBufManager = enabled;
+void Camera3Device::RequestThread::setHalBufferManagedStreams(
+            const std::set<int32_t> &halBufferManagedStreams) {
+    mHalBufManagedStreamIds = halBufferManagedStreams;
 }
 
 status_t Camera3Device::RequestThread::waitUntilRequestProcessed(
@@ -3467,7 +3521,8 @@
 void Camera3Device::RequestThread::updateNextRequest(NextRequest& nextRequest) {
     // Update the latest request sent to HAL
     camera_capture_request_t& halRequest = nextRequest.halRequest;
-    if (halRequest.settings != NULL) { // Don't update if they were unchanged
+    sp<Camera3Device> parent = mParent.promote();
+    if (halRequest.settings != nullptr) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
         camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
@@ -3480,8 +3535,7 @@
                     CameraMetadata(cloned));
         }
 
-        sp<Camera3Device> parent = mParent.promote();
-        if (parent != NULL) {
+        if (parent != nullptr) {
             int32_t inputStreamId = -1;
             if (halRequest.input_buffer != nullptr) {
               inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
@@ -3493,8 +3547,11 @@
                     halRequest.num_output_buffers, inputStreamId);
         }
     }
+    if (parent != nullptr) {
+        parent->collectRequestStats(halRequest.frame_number, mLatestRequest);
+    }
 
-    if (halRequest.settings != NULL) {
+    if (halRequest.settings != nullptr) {
         nextRequest.captureRequest->mSettingsList.begin()->metadata.unlock(
                 halRequest.settings);
     }
@@ -3886,13 +3943,22 @@
 
                     for (it = captureRequest->mSettingsList.begin();
                             it != captureRequest->mSettingsList.end(); it++) {
-                        res = hardware::cameraservice::utils::conversion::aidl::filterVndkKeys(
-                                mVndkVersion, it->metadata, false /*isStatic*/);
+                        res = filterVndkKeys(mVndkVersion, it->metadata, false /*isStatic*/);
                         if (res != OK) {
                             SET_ERR("RequestThread: Failed during VNDK filter of capture requests "
                                     "%d: %s (%d)", halRequest->frame_number, strerror(-res), res);
                             return INVALID_OPERATION;
                         }
+
+                        if (!parent->mSupportsExtensionKeys) {
+                            res = filterExtensionKeys(&it->metadata);
+                            if (res != OK) {
+                                SET_ERR("RequestThread: Failed during extension filter of capture "
+                                        "requests %d: %s (%d)", halRequest->frame_number,
+                                        strerror(-res), res);
+                                return INVALID_OPERATION;
+                            }
+                        }
                     }
                 }
             }
@@ -3974,11 +4040,15 @@
         nsecs_t waitDuration = kBaseGetBufferWait + parent->getExpectedInFlightDuration();
 
         SurfaceMap uniqueSurfaceIdMap;
+        bool containsHalBufferManagedStream = false;
         for (size_t j = 0; j < captureRequest->mOutputStreams.size(); j++) {
             sp<Camera3OutputStreamInterface> outputStream =
                     captureRequest->mOutputStreams.editItemAt(j);
             int streamId = outputStream->getId();
-
+            if (!containsHalBufferManagedStream) {
+                containsHalBufferManagedStream =
+                        contains(mHalBufManagedStreamIds, streamId);
+            }
             // Prepare video buffers for high speed recording on the first video request.
             if (mPrepareVideoStream && outputStream->isVideoStream()) {
                 // Only try to prepare video stream on the first video request.
@@ -4010,7 +4080,7 @@
                 uniqueSurfaceIdMap.insert({streamId, std::move(uniqueSurfaceIds)});
             }
 
-            if (mUseHalBufManager) {
+            if (parent->isHalBufferManagedStream(streamId)) {
                 if (outputStream->isAbandoned()) {
                     ALOGV("%s: stream %d is abandoned, skipping request", __FUNCTION__, streamId);
                     return TIMED_OUT;
@@ -4101,6 +4171,9 @@
                 isZslCapture = true;
             }
         }
+        bool passSurfaceMap =
+                mUseHalBufManager ||
+                        (flags::session_hal_buf_manager() && containsHalBufferManagedStream);
         auto expectedDurationInfo = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
@@ -4112,7 +4185,7 @@
                 requestedPhysicalCameras, isStillCapture, isZslCapture,
                 captureRequest->mRotateAndCropAuto, captureRequest->mAutoframingAuto,
                 mPrevCameraIdsWithZoom,
-                (mUseHalBufManager) ? uniqueSurfaceIdMap :
+                passSurfaceMap ? uniqueSurfaceIdMap :
                                       SurfaceMap{}, captureRequest->mRequestTimeNs);
         ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
                ", burstId = %" PRId32 ".",
@@ -4215,7 +4288,8 @@
 }
 
 void Camera3Device::RequestThread::signalPipelineDrain(const std::vector<int>& streamIds) {
-    if (!mUseHalBufManager) {
+    if (!mUseHalBufManager &&
+            (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() == 0)) {
         ALOGE("%s called for camera device not supporting HAL buffer management", __FUNCTION__);
         return;
     }
@@ -4367,22 +4441,28 @@
             captureRequest->mInputStream->returnInputBuffer(captureRequest->mInputBuffer);
         }
 
-        // No output buffer can be returned when using HAL buffer manager
-        if (!mUseHalBufManager) {
-            for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
-                //Buffers that failed processing could still have
-                //valid acquire fence.
-                int acquireFence = (*outputBuffers)[i].acquire_fence;
-                if (0 <= acquireFence) {
-                    close(acquireFence);
-                    outputBuffers->editItemAt(i).acquire_fence = -1;
-                }
-                outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
-                captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i],
-                        /*timestamp*/0, /*readoutTimestamp*/0,
-                        /*timestampIncreasing*/true, std::vector<size_t> (),
-                        captureRequest->mResultExtras.frameNumber);
+        for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
+            //Buffers that failed processing could still have
+            //valid acquire fence.
+            Camera3Stream *stream = Camera3Stream::cast((*outputBuffers)[i].stream);
+            int32_t streamId = stream->getId();
+            bool skipBufferForStream =
+                    mUseHalBufManager || (flags::session_hal_buf_manager() &&
+                            contains(mHalBufManagedStreamIds, streamId));
+            if (skipBufferForStream) {
+                // No output buffer can be returned when using HAL buffer manager for its stream
+                continue;
             }
+            int acquireFence = (*outputBuffers)[i].acquire_fence;
+            if (0 <= acquireFence) {
+                close(acquireFence);
+                outputBuffers->editItemAt(i).acquire_fence = -1;
+            }
+            outputBuffers->editItemAt(i).status = CAMERA_BUFFER_STATUS_ERROR;
+            captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i],
+                    /*timestamp*/0, /*readoutTimestamp*/0,
+                    /*timestampIncreasing*/true, std::vector<size_t> (),
+                    captureRequest->mResultExtras.frameNumber);
         }
 
         if (sendRequestError) {
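
The stream-configuration path above now wraps the binder-heavy finishConfiguration() loops in a RunThreadWithRealtimePriority scope guard (enabled under flags::surface_ipc()). A self-contained POSIX sketch of the same RAII idea, with an illustrative priority value and error handling rather than the camera service's exact choices:

#include <sched.h>
#include <cerrno>
#include <cstdio>
#include <cstring>

// Sketch of the RAII pattern behind RunThreadWithRealtimePriority: bump the
// calling thread to SCHED_FIFO on construction, restore the previous policy
// and parameters on destruction.
class ScopedRealtimePriority {
  public:
    explicit ScopedRealtimePriority(int priority = 1)
            : mPreviousPolicy(sched_getscheduler(0)) {
        if (mPreviousPolicy < 0 || sched_getparam(0, &mPreviousParams) != 0) {
            std::fprintf(stderr, "can't read scheduler state: %s\n", std::strerror(errno));
            return;
        }
        sched_param param{};
        param.sched_priority = priority;
        if (sched_setscheduler(0, SCHED_FIFO, &param) == 0) {
            mBumped = true;   // typically requires CAP_SYS_NICE or root
        } else {
            std::fprintf(stderr, "can't enter SCHED_FIFO: %s\n", std::strerror(errno));
        }
    }

    ~ScopedRealtimePriority() {
        if (mBumped) {
            sched_setscheduler(0, mPreviousPolicy, &mPreviousParams);
        }
    }

  private:
    int mPreviousPolicy;
    sched_param mPreviousParams{};
    bool mBumped = false;
};
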
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 9a2f2b1..d646886 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -47,6 +47,7 @@
 #include "device3/Camera3OutputInterface.h"
 #include "device3/Camera3OfflineSession.h"
 #include "device3/Camera3StreamInterface.h"
+#include "utils/AttributionAndPermissionUtils.h"
 #include "utils/TagMonitor.h"
 #include "utils/IPCTransport.h"
 #include "utils/LatencyHistogram.h"
@@ -79,12 +80,14 @@
             public camera3::SetErrorInterface,
             public camera3::InflightRequestUpdateInterface,
             public camera3::RequestBufferInterface,
-            public camera3::FlushBufferInterface {
+            public camera3::FlushBufferInterface,
+            public AttributionAndPermissionUtilsEncapsulator {
   friend class HidlCamera3Device;
   friend class AidlCamera3Device;
   public:
 
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
@@ -97,6 +100,10 @@
         return mInterface->getTransportType();
     }
 
+    bool isHalBufferManagedStream(int32_t streamId) const {
+        return mInterface->isHalBufferManagedStream(streamId);
+    };
+
     /**
      * CameraDeviceBase interface
      */
@@ -485,6 +492,9 @@
 
         /////////////////////////////////////////////////////////////////////
 
+        //Check if a stream is hal buffer managed
+        bool isHalBufferManagedStream(int32_t streamId) const;
+
         // Get a vector of (frameNumber, streamId) pair of currently inflight
         // buffers
         void getInflightBufferKeys(std::vector<std::pair<int32_t, int32_t>>* out);
@@ -556,7 +566,9 @@
 
         uint32_t mNextStreamConfigCounter = 1;
 
+        // TODO: This can be removed after flags::session_hal_buf_manager is removed
         bool mUseHalBufManager = false;
+        std::set<int32_t > mHalBufManagedStreamIds;
         bool mIsReconfigurationQuerySupported;
 
         const bool mSupportOfflineProcessing;
@@ -957,11 +969,11 @@
         void     setPaused(bool paused);
 
         /**
-         * Set Hal buffer manager behavior
-         * @param enabled Whether HAL buffer manager is enabled for the current session.
+         * Set HAL buffer managed streams
+         * @param halBufferManagedStreams The streams for which the HAL buffer manager is enabled
          *
          */
-        void setHalBufferManager(bool enabled);
+        void setHalBufferManagedStreams(const std::set<int32_t> &halBufferManagedStreams);
 
         /**
          * Wait until thread processes the capture request with settings'
@@ -1212,6 +1224,7 @@
         std::map<int32_t, std::set<std::string>> mGroupIdPhysicalCameraMap;
 
         bool               mUseHalBufManager = false;
+        std::set<int32_t > mHalBufManagedStreamIds;
         const bool         mSupportCameraMute;
         const bool         mOverrideToPortrait;
         const bool         mSupportSettingsOverride;
@@ -1391,6 +1404,10 @@
             const camera_stream_buffer_t *outputBuffers, uint32_t numOutputBuffers,
             int32_t inputStreamId);
 
+    // Collect any statistics that are based on the stream of capture requests sent
+    // to the HAL
+    void collectRequestStats(int64_t frameNumber, const CameraMetadata& request);
+
     metadata_vendor_id_t mVendorTagId;
 
     // Cached last requested template id
@@ -1402,6 +1419,7 @@
 
     // Whether HAL request buffers through requestStreamBuffers API
     bool mUseHalBufManager = false;
+    std::set<int32_t > mHalBufManagedStreamIds;
     bool mSessionHalBufManager = false;
     // Lock to ensure requestStreamBuffers() callbacks are serialized
     std::mutex mRequestBufferInterfaceLock;
@@ -1525,6 +1543,9 @@
     // AE_TARGET_FPS_RANGE
     bool mIsFixedFps = false;
 
+    // Flag to indicate that we shouldn't forward extension-related metadata
+    bool mSupportsExtensionKeys = false;
+
     // Injection camera related methods.
     class Camera3DeviceInjectionMethods : public virtual RefBase {
       public:
@@ -1609,6 +1630,35 @@
 
     void overrideStreamUseCaseLocked();
 
+    // An instance of this class will raise the scheduling policy of a given
+    // thread to real time and keep it that way throughout the lifetime
+    // of the object. The thread scheduling policy will revert to its original
+    // state after the instance is released. By default the implementation will
+    // raise the priority of the current thread unless clients explicitly specify
+    // another thread id.
+    // Clients must avoid:
+    //  - Keeping an instance of this class for extended or long-running operations.
+    //    This is only intended for short/temporary priority bumps that mitigate
+    //    scheduling delays within critical camera paths.
+    //  - Allocating instances of this class on the memory heap unless clients have
+    //    complete control over the object lifetime. It is preferable to allocate
+    //    instances of this class on the stack instead.
+    //  - Nesting multiple instances of this class using the same default or same thread id.
+    class RunThreadWithRealtimePriority final {
+        public:
+            RunThreadWithRealtimePriority(int tid = gettid());
+            ~RunThreadWithRealtimePriority();
+
+            RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
+            RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
+
+        private:
+            int mTid;
+            int mPreviousPolicy;
+            bool mPolicyBumped = false;
+            struct sched_param mPreviousParams;
+    };
+
 }; // class Camera3Device
 
 }; // namespace android
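
The RunThreadWithRealtimePriority declaration above only fixes the interface. As a rough illustration, a minimal sketch of what such an RAII helper could look like follows, assuming plain Linux sched_getscheduler()/sched_getparam()/sched_setscheduler() calls; the real definition is presumably in Camera3Device.cpp, is not part of this hunk, and may differ:

    #include <sched.h>
    #include <unistd.h>

    class RunThreadWithRealtimePriority final {
      public:
        RunThreadWithRealtimePriority(int tid = gettid()) : mTid(tid) {
            mPreviousPolicy = sched_getscheduler(mTid);
            if (mPreviousPolicy < 0 || sched_getparam(mTid, &mPreviousParams) != 0) {
                return;  // Could not capture the current state; leave the thread untouched.
            }
            struct sched_param rtParams = {};
            rtParams.sched_priority = 1;  // Lowest real-time priority is enough for a short bump.
            mPolicyBumped = (sched_setscheduler(mTid, SCHED_FIFO, &rtParams) == 0);
        }

        ~RunThreadWithRealtimePriority() {
            if (mPolicyBumped) {
                // Restore whatever policy and parameters the thread had before the bump.
                sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
            }
        }

        RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
        RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;

      private:
        int mTid;
        int mPreviousPolicy = 0;
        bool mPolicyBumped = false;
        struct sched_param mPreviousParams = {};
    };
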
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 75162bf..55467c3 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -68,7 +68,7 @@
     return INVALID_OPERATION;
 }
 
-void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Fake\n", mId);
     write(fd, lines.c_str(), lines.size());
@@ -99,12 +99,12 @@
     return OK;
 }
 
-status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3FakeStream::getEndpointUsage(uint64_t *usage) {
     *usage = FAKE_USAGE;
     return OK;
 }
 
-bool Camera3FakeStream::isVideoStream() const {
+bool Camera3FakeStream::isVideoStream() {
     return false;
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 1d82190..7addb90 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -50,7 +50,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     status_t         setTransform(int transform, bool mayChangeMirror);
 
@@ -70,7 +70,7 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    bool isVideoStream() const;
+    bool isVideoStream();
 
     /**
      * Return if the consumer configuration of this stream is deferred.
@@ -144,7 +144,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
 }; // class Camera3FakeStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 152687e..22d2716 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -77,7 +77,7 @@
     return false;
 }
 
-void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::ostringstream lines;
 
     uint64_t consumerUsage = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 239fc71..7e73662 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -52,7 +52,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     int              getMaxTotalBuffers() const { return mTotalBufferCount; }
   protected:
@@ -108,7 +108,7 @@
     virtual size_t   getCachedOutputBufferCountLocked() const;
     virtual size_t   getMaxCachedOutputBuffersLocked() const;
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) = 0;
 
     status_t getBufferPreconditionCheckLocked() const;
     status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 54ffbd7..283322e 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -216,7 +216,7 @@
     return OK;
 }
 
-void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Input\n", mId);
     write(fd, lines.c_str(), lines.size());
@@ -297,7 +297,7 @@
     return OK;
 }
 
-status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3InputStream::getEndpointUsage(uint64_t *usage) {
     // Per HAL3 spec, input streams have 0 for their initial usage field.
     *usage = 0;
     return OK;
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index d4f4b15..a99c364 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -43,7 +43,7 @@
     Camera3InputStream(int id, uint32_t width, uint32_t height, int format);
     ~Camera3InputStream();
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     // TODO: expose an interface to get the IGraphicBufferProducer
 
@@ -81,7 +81,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
     /**
      * BufferItemConsumer::BufferFreedListener interface
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 172b62a..1025061 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -58,6 +58,7 @@
         mTagMonitor(offlineStates.mTagMonitor),
         mVendorTagId(offlineStates.mVendorTagId),
         mUseHalBufManager(offlineStates.mUseHalBufManager),
+        mHalBufManagedStreamIds(offlineStates.mHalBufManagedStreamIds),
         mNeedFixupMonochromeTags(offlineStates.mNeedFixupMonochromeTags),
         mUsePartialResult(offlineStates.mUsePartialResult),
         mNumPartialResults(offlineStates.mNumPartialResults),
@@ -136,7 +137,7 @@
 
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
-        listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
+        mHalBufManagedStreamIds, listener, *this, mBufferRecords, *this, mSessionStatsBuilder};
 
     camera3::flushInflightRequests(states);
 
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index b5fd486..1ef3921 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -51,7 +51,8 @@
 struct Camera3OfflineStates {
     Camera3OfflineStates(
             const TagMonitor& tagMonitor, const metadata_vendor_id_t vendorTagId,
-            const bool useHalBufManager, const bool needFixupMonochromeTags,
+            const bool useHalBufManager, const std::set<int32_t> &halBufferManagedStreamIds,
+            const bool needFixupMonochromeTags,
             const bool usePartialResult, const uint32_t numPartialResults,
             const int64_t lastCompletedRegularFN, const int64_t lastCompletedReprocessFN,
             const int64_t lastCompletedZslFN, const uint32_t nextResultFN,
@@ -64,7 +65,8 @@
             const std::unordered_map<std::string, camera3::RotateAndCropMapper>&
                 rotateAndCropMappers) :
             mTagMonitor(tagMonitor), mVendorTagId(vendorTagId),
-            mUseHalBufManager(useHalBufManager), mNeedFixupMonochromeTags(needFixupMonochromeTags),
+            mUseHalBufManager(useHalBufManager), mHalBufManagedStreamIds(halBufferManagedStreamIds),
+            mNeedFixupMonochromeTags(needFixupMonochromeTags),
             mUsePartialResult(usePartialResult), mNumPartialResults(numPartialResults),
             mLastCompletedRegularFrameNumber(lastCompletedRegularFN),
             mLastCompletedReprocessFrameNumber(lastCompletedReprocessFN),
@@ -85,6 +87,7 @@
     const metadata_vendor_id_t mVendorTagId;
 
     const bool mUseHalBufManager;
+    const std::set<int32_t > &mHalBufManagedStreamIds;
     const bool mNeedFixupMonochromeTags;
 
     const bool mUsePartialResult;
@@ -181,6 +184,7 @@
     const metadata_vendor_id_t mVendorTagId;
 
     const bool mUseHalBufManager;
+    const std::set<int32_t > &mHalBufManagedStreamIds;
     const bool mNeedFixupMonochromeTags;
 
     const bool mUsePartialResult;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index f98636b..3cd4543 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -27,6 +27,7 @@
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include <android-base/unique_fd.h>
+#include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
@@ -43,6 +44,8 @@
     (type *)((char*)(ptr) - offsetof(type, member))
 #endif
 
+namespace flags = com::android::internal::camera::flags;
+
 namespace android {
 
 namespace camera3 {
@@ -165,7 +168,6 @@
         mState = STATE_ERROR;
     }
 
-    mConsumerName = "Deferred";
     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
@@ -235,65 +237,6 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
-    status_t res;
-
-    if ((res = getBufferPreconditionCheckLocked()) != OK) {
-        return res;
-    }
-
-    if (mUseBufferManager) {
-        ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
-                __FUNCTION__, mId);
-        return INVALID_OPERATION;
-    }
-
-    sp<Surface> consumer = mConsumer;
-    /**
-     * Release the lock briefly to avoid deadlock for below scenario:
-     * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
-     * This thread acquired StreamingProcessor lock and try to lock Camera3Stream lock.
-     * Thread 2: Camera3Stream::returnBuffer->StreamingProcessor::onFrameAvailable().
-     * This thread acquired Camera3Stream lock and bufferQueue lock, and try to lock
-     * StreamingProcessor lock.
-     * Thread 3: Camera3Stream::getBuffer(). This thread acquired Camera3Stream lock
-     * and try to lock bufferQueue lock.
-     * Then there is circular locking dependency.
-     */
-    mLock.unlock();
-
-    size_t numBuffersRequested = outBuffers->size();
-    std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
-
-    nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
-    res = consumer->dequeueBuffers(&buffers);
-    nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-    mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
-
-    mLock.lock();
-
-    if (res != OK) {
-        if (shouldLogError(res, mState)) {
-            ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
-                    __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
-        }
-        checkRetAndSetAbandonedLocked(res);
-        return res;
-    }
-    checkRemovedBuffersLocked();
-
-    /**
-     * FenceFD now owned by HAL except in case of error,
-     * in which case we reassign it to acquire_fence
-     */
-    for (size_t i = 0; i < numBuffersRequested; i++) {
-        handoutBufferLocked(*(outBuffers->at(i).outBuffer),
-                &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
-                /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
-    }
-    return OK;
-}
-
 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
             ANativeWindowBuffer* buffer, int anwReleaseFence,
             const std::vector<size_t>&) {
@@ -523,10 +466,11 @@
     return res;
 }
 
-void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
+void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) {
     std::string lines;
     lines += fmt::sprintf("    Stream[%d]: Output\n", mId);
-    lines += fmt::sprintf("      Consumer name: %s\n", mConsumerName);
+    lines += fmt::sprintf("      Consumer name: %s\n", (mConsumer.get() != nullptr) ?
+            mConsumer->getConsumerName() : "Deferred");
     write(fd, lines.c_str(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
@@ -619,8 +563,6 @@
         return res;
     }
 
-    mConsumerName = mConsumer->getConsumerName();
-
     res = native_window_set_usage(mConsumer.get(), mUsage);
     if (res != OK) {
         ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
@@ -668,7 +610,7 @@
         return res;
     }
 
-    int maxConsumerBuffers;
+    int maxConsumerBuffers = 0;
     res = static_cast<ANativeWindow*>(mConsumer.get())->query(
             mConsumer.get(),
             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
@@ -745,8 +687,11 @@
         }
     }
 
-    res = native_window_set_buffer_count(mConsumer.get(),
-            mTotalBufferCount);
+    if (flags::surface_ipc()) {
+        res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
+    } else {
+        res = native_window_set_buffer_count(mConsumer.get(), mTotalBufferCount);
+    }
     if (res != OK) {
         ALOGE("%s: Unable to set buffer count for stream %d",
                 __FUNCTION__, mId);
@@ -1048,7 +993,7 @@
     return OK;
 }
 
-status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) {
 
     status_t res;
 
@@ -1084,17 +1029,24 @@
 }
 
 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
-        const sp<Surface>& surface) const {
-    status_t res;
-    uint64_t u = 0;
+        const sp<Surface>& surface) {
+    bool internalConsumer = (mConsumer.get() != nullptr) && (mConsumer == surface);
+    if (mConsumerUsageCachedValue.has_value() && flags::surface_ipc() && internalConsumer) {
+        *usage = mConsumerUsageCachedValue.value();
+        return OK;
+    }
 
-    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
-    applyZSLUsageQuirk(camera_stream::format, &u);
-    *usage = u;
+    status_t res;
+
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), usage);
+    applyZSLUsageQuirk(camera_stream::format, usage);
+    if (internalConsumer) {
+        mConsumerUsageCachedValue = *usage;
+    }
     return res;
 }
 
-bool Camera3OutputStream::isVideoStream() const {
+bool Camera3OutputStream::isVideoStream() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1275,7 +1227,7 @@
     return OK;
 }
 
-bool Camera3OutputStream::isConsumedByHWComposer() const {
+bool Camera3OutputStream::isConsumedByHWComposer() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1286,7 +1238,7 @@
     return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
 }
 
-bool Camera3OutputStream::isConsumedByHWTexture() const {
+bool Camera3OutputStream::isConsumedByHWTexture() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
@@ -1297,7 +1249,7 @@
     return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
 }
 
-bool Camera3OutputStream::isConsumedByCPU() const {
+bool Camera3OutputStream::isConsumedByCPU() {
     uint64_t usage = 0;
     status_t res = getEndpointUsage(&usage);
     if (res != OK) {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 65791a9..8a93ed8 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -18,6 +18,7 @@
 #define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
 
 #include <mutex>
+#include <optional>
 #include <utils/RefBase.h>
 #include <gui/IProducerListener.h>
 #include <gui/Surface.h>
@@ -143,7 +144,7 @@
      * Camera3Stream interface
      */
 
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     /**
      * Set the transform on the output stream; one of the
@@ -154,21 +155,21 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    bool isVideoStream() const;
+    bool isVideoStream();
     /**
      * Return if this output stream is consumed by hardware composer.
      */
-    bool isConsumedByHWComposer() const;
+    bool isConsumedByHWComposer();
 
     /**
      * Return if this output stream is consumed by hardware texture.
      */
-    bool isConsumedByHWTexture() const;
+    bool isConsumedByHWTexture();
 
     /**
      * Return if this output stream is consumed by CPU.
      */
-    bool isConsumedByCPU() const;
+    bool isConsumedByCPU();
 
     /**
      * Return if the consumer configuration of this stream is deferred.
@@ -304,8 +305,7 @@
     virtual status_t disconnectLocked();
     status_t fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence);
 
-    status_t getEndpointUsageForSurface(uint64_t *usage,
-            const sp<Surface>& surface) const;
+    status_t getEndpointUsageForSurface(uint64_t *usage, const sp<Surface>& surface);
     status_t configureConsumerQueueLocked(bool allowPreviewRespace);
 
     // Consumer as the output of camera HAL
@@ -326,9 +326,6 @@
 
     bool mTraceFirstBuffer;
 
-    // Name of Surface consumer
-    std::string           mConsumerName;
-
     /**
      * GraphicBuffer manager this stream is registered to. Used to replace the buffer
      * allocation/deallocation role of BufferQueue.
@@ -366,6 +363,11 @@
      */
     uint64_t    mConsumerUsage;
 
+    /**
+     * Consumer endpoint usage flags cached from the buffer queue.
+     */
+    std::optional<uint64_t>    mConsumerUsageCachedValue;
+
     // Whether to drop valid buffers.
     bool mDropBuffers;
 
@@ -388,8 +390,6 @@
     virtual status_t getBufferLocked(camera_stream_buffer *buffer,
             const std::vector<size_t>& surface_ids);
 
-    virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;
-
     virtual status_t returnBufferLocked(
             const camera_stream_buffer &buffer,
             nsecs_t timestamp, nsecs_t readoutTimestamp,
@@ -401,7 +401,7 @@
 
     virtual status_t configureQueueLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
     /**
      * Private methods
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 1ab8162..77edfbe 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -39,7 +39,7 @@
     /**
      * Return if this output stream is for video encoding.
      */
-    virtual bool isVideoStream() const = 0;
+    virtual bool isVideoStream() = 0;
 
     /**
      * Return if the consumer configuration of this stream is deferred.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 450f3dd..89e08a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -45,13 +45,17 @@
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera_metadata_hidden.h>
+#include <com_android_internal_camera_flags.h>
 
 #include "device3/Camera3OutputUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 
 #include "system/camera_metadata.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace camera3 {
@@ -495,7 +499,8 @@
     states.inflightIntf.onInflightEntryRemovedLocked(duration);
 }
 
-void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
+void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx,
+        std::vector<BufferToReturn> *returnableBuffers) {
     InFlightRequestMap& inflightMap = states.inflightMap;
     const InFlightRequest &request = inflightMap.valueAt(idx);
     const uint32_t frameNumber = inflightMap.keyAt(idx);
@@ -533,11 +538,13 @@
         assert(request.requestStatus != OK ||
                request.pendingOutputBuffers.size() == 0);
 
-        returnOutputBuffers(
-            states.useHalBufManager, states.listener,
+        collectReturnableOutputBuffers(
+            states.useHalBufManager, states.halBufManagedStreamIds,
+            states.listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
             /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+            /*out*/ returnableBuffers,
             /*timestampIncreasing*/true,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
@@ -632,6 +639,7 @@
     // in-flight request and they will be returned when the shutter timestamp
     // arrives. Update the in-flight status and remove the in-flight entry if
     // all result data and shutter timestamp have been received.
+    std::vector<BufferToReturn> returnableBuffers{};
     nsecs_t shutterTimestamp = 0;
     {
         std::lock_guard<std::mutex> l(states.inflightLock);
@@ -793,9 +801,11 @@
         request.pendingOutputBuffers.appendArray(result->output_buffers,
                 result->num_output_buffers);
         if (shutterTimestamp != 0) {
-            returnAndRemovePendingOutputBuffers(
-                states.useHalBufManager, states.listener,
-                request, states.sessionStatsBuilder);
+            collectAndRemovePendingOutputBuffers(
+                states.useHalBufManager, states.halBufManagedStreamIds,
+                states.listener,
+                request, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers);
         }
 
         if (result->result != NULL && !isPartialResult) {
@@ -820,9 +830,18 @@
                     request.physicalMetadatas);
             }
         }
-        removeInFlightRequestIfReadyLocked(states, idx);
+        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+        if (!flags::return_buffers_outside_locks()) {
+            finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+        }
     } // scope for states.inFlightLock
 
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
     if (result->input_buffer != NULL) {
         if (hasInputBufferInRequest) {
             Camera3Stream *stream =
@@ -843,16 +862,17 @@
     }
 }
 
-void returnOutputBuffers(
+void collectReturnableOutputBuffers(
         bool useHalBufManager,
+        const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener,
         const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
         nsecs_t timestamp, nsecs_t readoutTimestamp, bool requested,
         nsecs_t requestTimeNs, SessionStatsBuilder& sessionStatsBuilder,
+        /*out*/ std::vector<BufferToReturn> *returnableBuffers,
         bool timestampIncreasing, const SurfaceMap& outputSurfaces,
-        const CaptureResultExtras &inResultExtras,
+        const CaptureResultExtras &resultExtras,
         ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {
-
     for (size_t i = 0; i < numBuffers; i++)
     {
         Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
@@ -862,7 +882,7 @@
         if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                 errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
@@ -871,7 +891,9 @@
         }
 
         if (outputBuffers[i].buffer == nullptr) {
-            if (!useHalBufManager) {
+            if (!useHalBufManager &&
+                    !(flags::session_hal_buf_manager() &&
+                            contains(halBufferManagedStreams, streamId))) {
                 // With HAL buffer management API, HAL sometimes will have to return buffers that
                 // has not got a output buffer handle filled yet. This is though illegal if HAL
                 // buffer management API is not being used.
@@ -885,22 +907,35 @@
         }
 
         const auto& it = outputSurfaces.find(streamId);
-        status_t res = OK;
 
         // Do not return the buffer if the buffer status is error, and the error
         // buffer strategy is CACHE.
         if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                 errorBufStrategy != ERROR_BUF_CACHE) {
             if (it != outputSurfaces.end()) {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        it->second, inResultExtras.frameNumber, transform);
+                        it->second, resultExtras,
+                        transform, requested ? requestTimeNs : 0);
             } else {
-                res = stream->returnBuffer(
+                returnableBuffers->emplace_back(stream,
                         outputBuffers[i], timestamp, readoutTimestamp, timestampIncreasing,
-                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
+                        std::vector<size_t> (), resultExtras,
+                        transform, requested ? requestTimeNs : 0 );
             }
         }
+    }
+}
+
+void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+        sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder) {
+    for (auto& b : returnableBuffers) {
+        const int streamId = b.stream->getId();
+
+        status_t res = b.stream->returnBuffer(b.buffer, b.timestamp,
+                b.readoutTimestamp, b.timestampIncreasing,
+                b.surfaceIds, b.resultExtras.frameNumber, b.transform);
+
         // Note: stream may be deallocated at this point, if this buffer was
         // the last reference to it.
         bool dropped = false;
@@ -911,50 +946,55 @@
             ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
             dropped = true;
         } else {
-            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
+            if (b.buffer.status == CAMERA_BUFFER_STATUS_ERROR || b.timestamp == 0) {
                 dropped = true;
             }
         }
-        if (requested) {
+        if (b.requestTimeNs > 0) {
             nsecs_t bufferTimeNs = systemTime();
-            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
+            int32_t captureLatencyMs = ns2ms(bufferTimeNs - b.requestTimeNs);
             sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
         }
 
         // Long processing consumers can cause returnBuffer timeout for shared stream
         // If that happens, cancel the buffer and send a buffer error to client
-        if (it != outputSurfaces.end() && res == TIMED_OUT &&
-                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
+        if (b.surfaceIds.size() > 0 && res == TIMED_OUT &&
+                b.buffer.status == CAMERA_BUFFER_STATUS_OK) {
             // cancel the buffer
-            camera_stream_buffer_t sb = outputBuffers[i];
+            camera_stream_buffer_t sb = b.buffer;
             sb.status = CAMERA_BUFFER_STATUS_ERROR;
-            stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
-                    timestampIncreasing, std::vector<size_t> (),
-                    inResultExtras.frameNumber, transform);
+            b.stream->returnBuffer(sb, /*timestamp*/0, /*readoutTimestamp*/0,
+                    b.timestampIncreasing, std::vector<size_t> (),
+                    b.resultExtras.frameNumber, b.transform);
 
             if (listener != nullptr) {
-                CaptureResultExtras extras = inResultExtras;
+                CaptureResultExtras extras = b.resultExtras;
                 extras.errorStreamId = streamId;
                 listener->notifyError(
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                         extras);
             }
         }
+
     }
 }
 
-void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
+void collectAndRemovePendingOutputBuffers(bool useHalBufManager,
+        const std::set<int32_t> &halBufferManagedStreams,
         sp<NotificationListener> listener, InFlightRequest& request,
-        SessionStatsBuilder& sessionStatsBuilder) {
+        SessionStatsBuilder& sessionStatsBuilder,
+        std::vector<BufferToReturn> *returnableBuffers) {
     bool timestampIncreasing =
             !((request.zslCapture && request.stillCapture) || request.hasInputBuffer);
     nsecs_t readoutTimestamp = request.resultExtras.hasReadoutTimestamp ?
             request.resultExtras.readoutTimestamp : 0;
-    returnOutputBuffers(useHalBufManager, listener,
+    collectReturnableOutputBuffers(useHalBufManager, halBufferManagedStreams, listener,
             request.pendingOutputBuffers.array(),
             request.pendingOutputBuffers.size(),
             request.shutterTimestamp, readoutTimestamp,
-            /*requested*/true, request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
+            /*requested*/true, request.requestTimeNs, sessionStatsBuilder,
+            /*out*/ returnableBuffers,
+            timestampIncreasing,
             request.outputSurfaces, request.resultExtras,
             request.errorBufStrategy, request.transform);
 
@@ -974,6 +1014,9 @@
     ATRACE_CALL();
     ssize_t idx;
 
+    std::vector<BufferToReturn> returnableBuffers{};
+    CaptureResultExtras pendingNotificationResultExtras{};
+
     // Set timestamp for the request in the in-flight tracking
     // and get the request ID to send upstream
     {
@@ -1040,9 +1083,13 @@
                             states.lastCompletedReprocessFrameNumber;
                     r.resultExtras.lastCompletedZslFrameNumber =
                             states.lastCompletedZslFrameNumber;
-                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    if (flags::return_buffers_outside_locks()) {
+                        pendingNotificationResultExtras = r.resultExtras;
+                    } else {
+                        states.listener->notifyShutter(r.resultExtras, msg.timestamp);
+                    }
                 }
-                // send pending result and buffers
+                // send pending result and buffers; this queues them up for delivery later
                 const auto& cameraIdsWithZoom = getCameraIdsWithZoomLocked(
                         inflightMap, r.pendingMetadata, r.cameraIdsWithZoom);
                 sendCaptureResult(states,
@@ -1051,16 +1098,35 @@
                     r.hasInputBuffer, r.zslCapture && r.stillCapture,
                     r.rotateAndCropAuto, cameraIdsWithZoom, r.physicalMetadatas);
             }
-            returnAndRemovePendingOutputBuffers(
-                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);
+            collectAndRemovePendingOutputBuffers(
+                    states.useHalBufManager, states.halBufManagedStreamIds,
+                    states.listener, r, states.sessionStatsBuilder, &returnableBuffers);
 
-            removeInFlightRequestIfReadyLocked(states, idx);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
+            removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+
         }
     }
     if (idx < 0) {
         SET_ERR("Shutter notification for non-existent frame number %d",
                 msg.frame_number);
     }
+    // Call notifyShutter outside of the in-flight mutex
+    if (flags::return_buffers_outside_locks() && pendingNotificationResultExtras.isValid()) {
+        states.listener->notifyShutter(pendingNotificationResultExtras, msg.timestamp);
+    }
+
+    // With no locks held, finish returning buffers to streams, which may take a while since
+    // binder calls are involved
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
+
 }
 
 void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
@@ -1106,6 +1172,8 @@
             break;
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
+        {
+            std::vector<BufferToReturn> returnableBuffers{};
             {
                 std::lock_guard<std::mutex> l(states.inflightLock);
                 ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
@@ -1142,7 +1210,12 @@
 
                         // Check whether the buffers returned. If they returned,
                         // remove inflight request.
-                        removeInFlightRequestIfReadyLocked(states, idx);
+                        removeInFlightRequestIfReadyLocked(states, idx, &returnableBuffers);
+                        if (!flags::return_buffers_outside_locks()) {
+                            finishReturningOutputBuffers(returnableBuffers,
+                                    states.listener, states.sessionStatsBuilder);
+                        }
+
                     }
                 } else {
                     resultExtras.frameNumber = msg.frame_number;
@@ -1151,6 +1224,12 @@
                             resultExtras.frameNumber);
                 }
             }
+
+            if (flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
+
             resultExtras.errorStreamId = streamId;
             if (states.listener != nullptr) {
                 states.listener->notifyError(errorCode, resultExtras);
@@ -1159,6 +1238,7 @@
                         states.cameraId.c_str(), __FUNCTION__);
             }
             break;
+        }
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
             // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
             // callback to the app. Rather, use STATUS_ERROR of image buffers.
@@ -1188,17 +1268,24 @@
 
 void flushInflightRequests(FlushInflightReqStates& states) {
     ATRACE_CALL();
+    std::vector<BufferToReturn> returnableBuffers{};
     { // First return buffers cached in inFlightMap
         std::lock_guard<std::mutex> l(states.inflightLock);
         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
             const InFlightRequest &request = states.inflightMap.valueAt(idx);
-            returnOutputBuffers(
-                states.useHalBufManager, states.listener,
+            collectReturnableOutputBuffers(
+                states.useHalBufManager, states.halBufManagedStreamIds,
+                states.listener,
                 request.pendingOutputBuffers.array(),
                 request.pendingOutputBuffers.size(), /*timestamp*/0, /*readoutTimestamp*/0,
                 /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
+                /*out*/ &returnableBuffers,
                 /*timestampIncreasing*/true, request.outputSurfaces, request.resultExtras,
                 request.errorBufStrategy);
+            if (!flags::return_buffers_outside_locks()) {
+                finishReturningOutputBuffers(returnableBuffers,
+                        states.listener, states.sessionStatsBuilder);
+            }
             ALOGW("%s: Frame %d |  Timestamp: %" PRId64 ", metadata"
                     " arrived: %s, buffers left: %d.\n", __FUNCTION__,
                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
@@ -1209,6 +1296,10 @@
         states.inflightMap.clear();
         states.inflightIntf.onInflightMapFlushedLocked();
     }
+    if (flags::return_buffers_outside_locks()) {
+        finishReturningOutputBuffers(returnableBuffers,
+                states.listener, states.sessionStatsBuilder);
+    }
 
     // Then return all inflight buffers not returned by HAL
     std::vector<std::pair<int32_t, int32_t>> inflightKeys;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 134c037..75864d7 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -44,15 +44,50 @@
      * Helper methods shared between Camera3Device/Camera3OfflineSession for HAL callbacks
      */
 
-    // helper function to return the output buffers to output streams. The
-    // function also optionally calls notify(ERROR_BUFFER).
-    void returnOutputBuffers(
+    struct BufferToReturn {
+        Camera3StreamInterface *stream;
+        camera_stream_buffer_t buffer;
+        nsecs_t timestamp;
+        nsecs_t readoutTimestamp;
+        bool timestampIncreasing;
+        std::vector<size_t> surfaceIds;
+        const CaptureResultExtras resultExtras;
+        int32_t transform;
+        nsecs_t requestTimeNs;
+
+        BufferToReturn(Camera3StreamInterface *stream,
+                camera_stream_buffer_t buffer,
+                nsecs_t timestamp, nsecs_t readoutTimestamp,
+                bool timestampIncreasing, std::vector<size_t> surfaceIds,
+                const CaptureResultExtras &resultExtras,
+                int32_t transform, nsecs_t requestTimeNs):
+            stream(stream),
+            buffer(buffer),
+            timestamp(timestamp),
+            readoutTimestamp(readoutTimestamp),
+            timestampIncreasing(timestampIncreasing),
+            surfaceIds(surfaceIds),
+            resultExtras(resultExtras),
+            transform(transform),
+            requestTimeNs(requestTimeNs) {}
+    };
+
+    // Helper function to collect the output buffers that should be
+    // returned to output streams. The function also optionally calls
+    // notify(ERROR_BUFFER). Returns the list of buffers to hand back
+    // to streams in returnableBuffers. Does not make any two-way
+    // binder calls, so it is suitable for use while critical locks are
+    // held.
+    void collectReturnableOutputBuffers(
             bool useHalBufManager,
+            const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
             const camera_stream_buffer_t *outputBuffers,
             size_t numBuffers, nsecs_t timestamp,
             nsecs_t readoutTimestamp, bool requested, nsecs_t requestTimeNs,
-            SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing = true,
+            SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers,
+            bool timestampIncreasing = true,
             // The following arguments are only meant for surface sharing use case
             const SurfaceMap& outputSurfaces = SurfaceMap{},
             // Used to send buffer error callback when failing to return buffer
@@ -60,13 +95,24 @@
             ERROR_BUF_STRATEGY errorBufStrategy = ERROR_BUF_RETURN,
             int32_t transform = -1);
 
-    // helper function to return the output buffers to output streams, and
-    // remove the returned buffers from the inflight request's pending buffers
-    // vector.
-    void returnAndRemovePendingOutputBuffers(
+    // Helper function to collect the output buffers that are ready to be
+    // returned to output streams, and to remove these buffers from
+    // the inflight request's pending buffers vector. Does not make
+    // any two-way binder calls, so it is suitable for use while critical
+    // locks are held.
+    void collectAndRemovePendingOutputBuffers(
             bool useHalBufManager,
+            const std::set<int32_t> &halBufferManagedStreams,
             sp<NotificationListener> listener, // Only needed when outputSurfaces is not empty
-            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder);
+            InFlightRequest& request, SessionStatsBuilder& sessionStatsBuilder,
+            /*out*/ std::vector<BufferToReturn> *returnableBuffers);
+
+    // Actually return filled output buffers to the consumer, using the list
+    // provided by collectReturnableOutputBuffers / collectAndRemovePendingOutputBuffers.
+    // Makes two-way binder calls to applications, so do not hold any critical locks when
+    // calling.
+    void finishReturningOutputBuffers(const std::vector<BufferToReturn> &returnableBuffers,
+            sp<NotificationListener> listener, SessionStatsBuilder& sessionStatsBuilder);
 
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
@@ -87,6 +133,7 @@
         uint32_t& nextReprocResultFrameNum;
         uint32_t& nextZslResultFrameNum; // end of outputLock scope
         const bool useHalBufManager;
+        const std::set<int32_t > &halBufManagedStreamIds;
         const bool usePartialResult;
         const bool needFixupMonoChrome;
         const uint32_t numPartialResults;
@@ -118,6 +165,7 @@
         const std::string& cameraId;
         std::mutex& reqBufferLock; // lock to serialize request buffer calls
         const bool useHalBufManager;
+        const std::set<int32_t > &halBufManagedStreamIds;
         StreamSet& outputStreams;
         SessionStatsBuilder& sessionStatsBuilder;
         SetErrorInterface& setErrIntf;
@@ -128,6 +176,7 @@
     struct ReturnBufferStates {
         const std::string& cameraId;
         const bool useHalBufManager;
+        const std::set<int32_t > &halBufManagedStreamIds;
         StreamSet& outputStreams;
         SessionStatsBuilder& sessionStatsBuilder;
         BufferRecordsInterface& bufferRecordsIntf;
@@ -138,6 +187,7 @@
         std::mutex& inflightLock;
         InFlightRequestMap& inflightMap; // end of inflightLock scope
         const bool useHalBufManager;
+        const std::set<int32_t > &halBufManagedStreamIds;
         sp<NotificationListener> listener;
         InflightRequestUpdateInterface& inflightIntf;
         BufferRecordsInterface& bufferRecordsIntf;
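
Taken together, the collect*/finish* helpers above split the old returnOutputBuffers() path into a collect phase that is safe under locks and a finish phase that performs the binder calls. A self-contained sketch of that pattern, using simplified stand-in types rather than the real camera service classes (in the patch the second phase is additionally gated by flags::return_buffers_outside_locks()):

    #include <mutex>
    #include <vector>

    struct PendingReturn {
        int streamId;
        long frameNumber;
    };

    class InflightTracker {
      public:
        // Called for each completed capture result.
        void onResult(int streamId, long frameNumber) {
            std::vector<PendingReturn> returnable;
            {
                std::lock_guard<std::mutex> lock(mInflightLock);
                // Phase 1: under the in-flight lock, only record what has to be
                // handed back; no blocking (binder) calls are made here.
                returnable.push_back({streamId, frameNumber});
            }
            // Phase 2: with the lock released, perform the potentially slow
            // two-way calls for each collected buffer.
            for (const auto& r : returnable) {
                deliverToConsumer(r);
            }
        }

      private:
        void deliverToConsumer(const PendingReturn&) { /* blocking IPC would go here */ }
        std::mutex mInflightLock;
    };
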
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index 3ac666b..aca7a67 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -32,13 +32,17 @@
 
 #include <camera/CameraUtils.h>
 #include <camera_metadata_hidden.h>
+#include <com_android_internal_camera_flags.h>
 
 #include "device3/Camera3OutputUtils.h"
+#include "utils/SessionConfigurationUtils.h"
 
 #include "system/camera_metadata.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace android::hardware::camera;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 namespace camera3 {
@@ -207,7 +211,9 @@
 
         bool noBufferReturned = false;
         buffer_handle_t *buffer = nullptr;
-        if (states.useHalBufManager) {
+        if (states.useHalBufManager ||
+                (flags::session_hal_buf_manager() &&
+                        contains(states.halBufManagedStreamIds, bSrc.streamId))) {
             // This is suspicious most of the time but can be correct during flush where HAL
             // has to return capture result before a buffer is requested
             if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
@@ -294,13 +300,15 @@
 template <class VecStreamBufferType>
 void returnStreamBuffersT(ReturnBufferStates& states,
         const VecStreamBufferType& buffers) {
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer managerment",
-                __FUNCTION__, states.cameraId.c_str());
-        return;
-    }
 
     for (const auto& buf : buffers) {
+        if (!states.useHalBufManager &&
+            !(flags::session_hal_buf_manager() &&
+             contains(states.halBufManagedStreamIds, buf.streamId))) {
+            ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
+                  __FUNCTION__, states.cameraId.c_str(), buf.streamId);
+            return;
+        }
         if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
             ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
             continue;
@@ -337,9 +345,15 @@
             continue;
         }
         streamBuffer.stream = stream->asHalStream();
-        returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
-                &streamBuffer, /*size*/1, /*timestamp*/ 0, /*readoutTimestamp*/0,
-                /*requested*/false, /*requestTimeNs*/0, states.sessionStatsBuilder);
+        std::vector<BufferToReturn> returnableBuffers{};
+        collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+                /*listener*/nullptr, &streamBuffer, /*size*/1, /*timestamp*/ 0,
+                /*readoutTimestamp*/0, /*requested*/false, /*requestTimeNs*/0,
+                states.sessionStatsBuilder,
+                /*out*/&returnableBuffers);
+        finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+                states.sessionStatsBuilder);
+
     }
 }
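
The check repeated inline in the hunks above (useHalBufManager, or the session_hal_buf_manager flag plus the stream id being present in halBufManagedStreamIds) can be read as a single predicate. A minimal sketch, assuming the contains() helper brought in via the SessionConfigurationUtils using-directive and a hypothetical isHalBufManaged() name:

    #include <cstdint>
    #include <set>

    // Stand-in for the contains() helper from SessionConfigurationUtils.
    static bool contains(const std::set<int32_t>& ids, int32_t id) {
        return ids.find(id) != ids.end();
    }

    // Hypothetical helper: either the whole session uses HAL buffer management
    // (legacy behavior), or the flagged per-stream path is enabled and this
    // particular stream opted in.
    static bool isHalBufManaged(bool useHalBufManager,
                                bool sessionHalBufManagerFlagEnabled,
                                const std::set<int32_t>& halBufManagedStreamIds,
                                int32_t streamId) {
        return useHalBufManager ||
                (sessionHalBufManagerFlagEnabled &&
                        contains(halBufManagedStreamIds, streamId));
    }
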
 
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 22f97bf..485f3f0 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -279,7 +279,7 @@
     return res;
 }
 
-status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) const {
+status_t Camera3SharedOutputStream::getEndpointUsage(uint64_t *usage) {
 
     status_t res = OK;
     uint64_t u = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 90914d4..818ce17 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -137,7 +137,7 @@
 
     virtual status_t disconnectLocked();
 
-    virtual status_t getEndpointUsage(uint64_t *usage) const;
+    virtual status_t getEndpointUsage(uint64_t *usage);
 
 }; // class Camera3SharedOutputStream
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 701c472..4934203 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -957,7 +957,7 @@
     }
 }
 
-void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const
+void Camera3Stream::dump(int fd, [[maybe_unused]] const Vector<String16> &args)
 {
     mBufferLimitLatency.dump(fd,
             "      Latency histogram for wait on max_buffers");
@@ -969,11 +969,6 @@
     return INVALID_OPERATION;
 }
 
-status_t Camera3Stream::getBuffersLocked(std::vector<OutstandingBuffer>*) {
-    ALOGE("%s: This type of stream does not support output", __FUNCTION__);
-    return INVALID_OPERATION;
-}
-
 status_t Camera3Stream::returnBufferLocked(const camera_stream_buffer &,
                                            nsecs_t, nsecs_t, int32_t, const std::vector<size_t>&) {
     ALOGE("%s: This type of stream does not support output", __FUNCTION__);
@@ -1047,92 +1042,6 @@
     mBufferFreedListener = listener;
 }
 
-status_t Camera3Stream::getBuffers(std::vector<OutstandingBuffer>* buffers,
-        nsecs_t waitBufferTimeout) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mLock);
-    status_t res = OK;
-
-    if (buffers == nullptr) {
-        ALOGI("%s: buffers must not be null!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    size_t numBuffersRequested = buffers->size();
-    if (numBuffersRequested == 0) {
-        ALOGE("%s: 0 buffers are requested!", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
-    // This function should be only called when the stream is configured already.
-    if (mState != STATE_CONFIGURED) {
-        ALOGE("%s: Stream %d: Can't get buffers if stream is not in CONFIGURED state %d",
-                __FUNCTION__, mId, mState);
-        if (mState == STATE_ABANDONED) {
-            return DEAD_OBJECT;
-        } else {
-            return INVALID_OPERATION;
-        }
-    }
-
-    size_t numOutstandingBuffers = getHandoutOutputBufferCountLocked();
-    size_t numCachedBuffers = getCachedOutputBufferCountLocked();
-    size_t maxNumCachedBuffers = getMaxCachedOutputBuffersLocked();
-    // Wait for new buffer returned back if we are running into the limit. There
-    // are 2 limits:
-    // 1. The number of HAL buffers is greater than max_buffers
-    // 2. The number of HAL buffers + cached buffers is greater than max_buffers
-    //    + maxCachedBuffers
-    while (numOutstandingBuffers + numBuffersRequested > camera_stream::max_buffers ||
-            numOutstandingBuffers + numCachedBuffers + numBuffersRequested >
-            camera_stream::max_buffers + maxNumCachedBuffers) {
-        ALOGV("%s: Already dequeued %zu(+%zu) output buffers and requesting %zu "
-                "(max is %d(+%zu)), waiting.", __FUNCTION__, numOutstandingBuffers,
-                numCachedBuffers, numBuffersRequested, camera_stream::max_buffers,
-                maxNumCachedBuffers);
-        nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
-        if (waitBufferTimeout < kWaitForBufferDuration) {
-            waitBufferTimeout = kWaitForBufferDuration;
-        }
-        res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
-        nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
-        mBufferLimitLatency.add(waitStart, waitEnd);
-        if (res != OK) {
-            if (res == TIMED_OUT) {
-                ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
-                        __FUNCTION__, waitBufferTimeout / 1000000LL,
-                        camera_stream::max_buffers);
-            }
-            return res;
-        }
-        size_t updatedNumOutstandingBuffers = getHandoutOutputBufferCountLocked();
-        size_t updatedNumCachedBuffers = getCachedOutputBufferCountLocked();
-        if (updatedNumOutstandingBuffers >= numOutstandingBuffers &&
-                updatedNumCachedBuffers == numCachedBuffers) {
-            ALOGE("%s: outstanding buffer count goes from %zu to %zu, "
-                    "getBuffer(s) call must not run in parallel!", __FUNCTION__,
-                    numOutstandingBuffers, updatedNumOutstandingBuffers);
-            return INVALID_OPERATION;
-        }
-        numOutstandingBuffers = updatedNumOutstandingBuffers;
-        numCachedBuffers = updatedNumCachedBuffers;
-    }
-
-    res = getBuffersLocked(buffers);
-    if (res == OK) {
-        for (auto& outstandingBuffer : *buffers) {
-            camera_stream_buffer* buffer = outstandingBuffer.outBuffer;
-            fireBufferListenersLocked(*buffer, /*acquired*/true, /*output*/true);
-            if (buffer->buffer) {
-                Mutex::Autolock l(mOutstandingBuffersLock);
-                mOutstandingBuffers.push_back(*buffer->buffer);
-            }
-        }
-    }
-
-    return res;
-}
-
 void Camera3Stream::queueHDRMetadata(buffer_handle_t buffer, sp<ANativeWindow>& anw,
         int64_t dynamicRangeProfile) {
     auto& mapper = GraphicBufferMapper::get();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f06ccf3..ccd1044 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -343,12 +343,6 @@
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    status_t         getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout);
-
-    /**
      * Return a buffer to the stream after use by the HAL.
      *
      * Multiple surfaces could share the same HAL stream, but a request may
@@ -429,7 +423,7 @@
     /**
      * Debug dump of the stream's state.
      */
-    virtual void     dump(int fd, const Vector<String16> &args) const;
+    virtual void     dump(int fd, const Vector<String16> &args);
 
     /**
      * Add a camera3 buffer listener. Adding the same listener twice has
@@ -535,8 +529,6 @@
             nsecs_t timestamp, nsecs_t readoutTimestamp, int32_t transform,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
-    virtual status_t getBuffersLocked(std::vector<OutstandingBuffer>*);
-
     virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size* size);
 
     virtual status_t returnInputBufferLocked(
@@ -570,7 +562,7 @@
 
     // Get the usage flags for the other endpoint, or return
     // INVALID_OPERATION if they cannot be obtained.
-    virtual status_t getEndpointUsage(uint64_t *usage) const = 0;
+    virtual status_t getEndpointUsage(uint64_t *usage) = 0;
 
     // Return whether the buffer is in the list of outstanding buffers.
     bool isOutstandingBuffer(const camera_stream_buffer& buffer) const;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7fa6273..4df8193 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -395,11 +395,6 @@
          */
         std::vector<size_t> surface_ids;
     };
-    /**
-     * Similar to getBuffer() except this method fills multiple buffers.
-     */
-    virtual status_t getBuffers(std::vector<OutstandingBuffer>* buffers,
-            nsecs_t waitBufferTimeout) = 0;
 
     /**
      * Return a buffer to the stream after use by the HAL.
@@ -484,7 +479,7 @@
     /**
      * Debug dump of the stream's state.
      */
-    virtual void     dump(int fd, const Vector<String16> &args) const = 0;
+    virtual void     dump(int fd, const Vector<String16> &args) = 0;
 
     virtual void     addBufferListener(
             wp<Camera3StreamBufferListener> listener) = 0;
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index a7bd312..3626f20 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -35,6 +35,7 @@
     uint32_t operation_mode;
     bool input_is_multi_resolution;
     bool use_hal_buf_manager = false;
+    std::set<int32_t> hal_buffer_managed_streams;
 } camera_stream_configuration_t;
 
 typedef struct camera_capture_request {
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 97475f0..7f30f5e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -59,6 +59,7 @@
 #include <com_android_internal_camera_flags.h>
 
 #include "utils/CameraTraces.h"
+#include "utils/SessionConfigurationUtils.h"
 #include "mediautils/SchedulingPolicyService.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3InputStream.h"
@@ -67,7 +68,6 @@
 #include "device3/aidl/AidlCamera3OutputUtils.h"
 #include "device3/aidl/AidlCamera3OfflineSession.h"
 #include "CameraService.h"
-#include "utils/CameraThreadState.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 #include "utils/CameraServiceProxyWrapper.h"
@@ -79,6 +79,7 @@
 #include "AidlCamera3Device.h"
 
 using namespace android::camera3;
+using namespace android::camera3::SessionConfigurationUtils;
 using namespace aidl::android::hardware;
 using aidl::android::hardware::camera::metadata::SensorPixelMode;
 using aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
@@ -171,10 +172,11 @@
 
 AidlCamera3Device::AidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient) :
-        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
-        legacyClient) {
+        Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+                overrideForPerfClass, overrideToPortrait, legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -337,6 +339,16 @@
 
     mBatchSizeLimitEnabled = (deviceVersion >= CAMERA_DEVICE_API_VERSION_1_2);
 
+    camera_metadata_entry readoutSupported = mDeviceInfo.find(ANDROID_SENSOR_READOUT_TIMESTAMP);
+    if (readoutSupported.count == 0) {
+        ALOGW("%s: Could not find value corresponding to ANDROID_SENSOR_READOUT_TIMESTAMP. "
+              "Assuming true.", __FUNCTION__);
+        mSensorReadoutTimestampSupported = true;
+    } else {
+        mSensorReadoutTimestampSupported =
+                readoutSupported.data.u8[0] == ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
+    }
+
     return initializeCommonLocked();
 }
 
@@ -400,7 +412,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -442,7 +454,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -450,7 +462,7 @@
         mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
+        camera3::notify(states, msg, mSensorReadoutTimestampSupported);
     }
     return ::ndk::ScopedAStatus::ok();
 
@@ -531,7 +543,7 @@
         }
 
         // When not using HAL buf manager, only allow streams requested by app to be preserved
-        if (!mUseHalBufManager) {
+        if (!isHalBufferManagedStream(id)) {
             if (std::find(streamsToKeep.begin(), streamsToKeep.end(), id) == streamsToKeep.end()) {
                 SET_ERR("stream ID %d must not be switched to offline!", id);
                 return UNKNOWN_ERROR;
@@ -611,17 +623,18 @@
     // TODO: check if we need to lock before copying states
     //       though technically no other thread should be talking to Camera3Device at this point
     Camera3OfflineStates offlineStates(
-            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
-            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
-            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
-            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
-            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
-            mZoomRatioMappers, mRotateAndCropMappers);
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mHalBufManagedStreamIds,
+            mNeedFixupMonochromeTags, mUsePartialResult, mNumPartialResults,
+            mLastCompletedRegularFrameNumber, mLastCompletedReprocessFrameNumber,
+            mLastCompletedZslFrameNumber, mNextResultFrameNumber,
+            mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+            mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+            mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+            mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers);
 
     *session = new AidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
-            std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
+                                             std::move(bufferRecords), offlineReqs, offlineStates,
+                                             offlineSession, mSensorReadoutTimestampSupported);
 
     // Delete all streams that has been transferred to offline session
     Mutex::Autolock l(mLock);
@@ -688,8 +701,8 @@
         aidl::android::hardware::camera::device::BufferRequestStatus* status) {
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams,
-        mSessionStatsBuilder, *this, *(mInterface), *this};
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder, *this, *(mInterface), *this};
     camera3::requestStreamBuffers(states, bufReqs, outBuffers, status);
     return ::ndk::ScopedAStatus::ok();
 }
@@ -713,7 +726,7 @@
 ::ndk::ScopedAStatus AidlCamera3Device::returnStreamBuffers(
         const std::vector<camera::device::StreamBuffer>& buffers) {
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams,  mSessionStatsBuilder,
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
         *(mInterface)};
     camera3::returnStreamBuffers(states, buffers);
     return ::ndk::ScopedAStatus::ok();
@@ -905,6 +918,12 @@
         camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
+        // For stream configurations with multi res streams, hal buffer manager has to be used.
+        if (!flags::session_hal_buf_manager() && cam3stream->getHalStreamGroupId() != -1 &&
+                src->stream_type != CAMERA_STREAM_INPUT) {
+            mUseHalBufManager = true;
+            config->use_hal_buf_manager = true;
+        }
         cam3stream->setBufferFreedListener(this);
         int streamId = cam3stream->getId();
         StreamType streamType;
@@ -975,31 +994,38 @@
     requestedConfiguration.multiResolutionInputImage = config->input_is_multi_resolution;
     requestedConfiguration.logId = logId;
     ndk::ScopedAStatus err = ndk::ScopedAStatus::ok();
+    int32_t interfaceVersion = 0;
     camera::device::ConfigureStreamsRet configureStreamsRet;
-    if (flags::session_hal_buf_manager()) {
-        int32_t interfaceVersion = 0;
-        err = mAidlSession->getInterfaceVersion(&interfaceVersion);
-        if (!err.isOk()) {
-            ALOGE("%s: Transaction error getting interface version: %s", __FUNCTION__,
-                    err.getMessage());
-            return AidlProviderInfo::mapToStatusT(err);
-        }
-        if (interfaceVersion >= AIDL_DEVICE_SESSION_V3 && mSupportSessionHalBufManager) {
-            err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
-            finalConfiguration = std::move(configureStreamsRet.halStreams);
-        } else {
-            err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
-        }
+    err = mAidlSession->getInterfaceVersion(&interfaceVersion);
+    if (!err.isOk()) {
+        ALOGE("%s: Transaction error getting interface version: %s", __FUNCTION__,
+              err.getMessage());
+        return AidlProviderInfo::mapToStatusT(err);
+    }
+    if (flags::session_hal_buf_manager() && interfaceVersion >= AIDL_DEVICE_SESSION_V3
+            && mSupportSessionHalBufManager) {
+        err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
+        finalConfiguration = std::move(configureStreamsRet.halStreams);
     } else {
         err = mAidlSession->configureStreams(requestedConfiguration, &finalConfiguration);
     }
+
     if (!err.isOk()) {
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
         return AidlProviderInfo::mapToStatusT(err);
     }
-    if (flags::session_hal_buf_manager() && mSupportSessionHalBufManager) {
-        mUseHalBufManager = configureStreamsRet.enableHalBufferManager;
-        config->use_hal_buf_manager = configureStreamsRet.enableHalBufferManager;
+
+    if (flags::session_hal_buf_manager()) {
+        std::set<int32_t> halBufferManagedStreamIds;
+        for (const auto &halStream: finalConfiguration) {
+            if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
+                    mSupportSessionHalBufManager && halStream.enableHalBufferManager)
+                    || mUseHalBufManager) {
+                halBufferManagedStreamIds.insert(halStream.id);
+            }
+        }
+        mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
+        config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
     }
     // And convert output stream configuration from AIDL
     for (size_t i = 0; i < config->num_streams; i++) {
@@ -1070,9 +1096,9 @@
             }
             dstStream->setUsage(
                     mapProducerToFrameworkUsage(src.producerUsage));
-
             if (flags::session_hal_buf_manager()) {
-                dstStream->setHalBufferManager(mUseHalBufManager);
+                dstStream->setHalBufferManager(
+                        contains(config->hal_buffer_managed_streams, streamId));
             }
         }
         dst->max_buffers = src.maxBuffers;
@@ -1396,7 +1422,7 @@
                     handlesCreated->push_back(acquireFence);
                 }
                 dst.acquireFence = camera3::dupToAidlIfNotNull(acquireFence);
-            } else if (mUseHalBufManager) {
+            } else if (isHalBufferManagedStream(streamId)) {
                 // HAL buffer management path
                 dst.bufferId = BUFFER_ID_NO_BUFFER;
                 dst.buffer = aidl::android::hardware::common::NativeHandle();
@@ -1410,7 +1436,7 @@
             dst.releaseFence = aidl::android::hardware::common::NativeHandle();
 
             // Output buffers are empty when using HAL buffer manager
-            if (!mUseHalBufManager) {
+            if (!isHalBufferManagedStream(streamId)) {
                 mBufferRecords.pushInflightBuffer(
                         captureRequest->frameNumber, streamId, src->buffer);
                 inflightBuffers->push_back(std::make_pair(captureRequest->frameNumber, streamId));
@@ -1456,8 +1482,9 @@
                 bool supportCameraMute,
                 bool overrideToPortrait,
                 bool supportSettingsOverride) :
-          RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+          RequestThread(parent, statusTracker, interface, sessionParamKeys,
+                  useHalBufManager, supportCameraMute, overrideToPortrait,
+                  supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
@@ -1690,7 +1717,8 @@
                 bool overrideToPortrait,
                 bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
+            useHalBufManager, supportCameraMute, overrideToPortrait,
+            supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 90e2f97..ac29bbc 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -41,6 +41,7 @@
     friend class AidlCameraDeviceCallbacks;
     explicit AidlCamera3Device(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+            std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
             const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
@@ -289,6 +290,9 @@
     // capture requests.
     bool mBatchSizeLimitEnabled = false;
 
+    // Whether the HAL supports reporting sensor readout timestamp
+    bool mSensorReadoutTimestampSupported = true;
+
 }; // class AidlCamera3Device
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 01c4e88..f8308df 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -122,7 +122,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -169,7 +169,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
@@ -177,7 +177,7 @@
         /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
-        camera3::notify(states, msg);
+        camera3::notify(states, msg, mSensorReadoutTimestampSupported);
     }
     return ::ndk::ScopedAStatus::ok();
 }
@@ -208,7 +208,8 @@
     }
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager,
+        mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
         *this, mBufferRecords, *this};
     camera3::requestStreamBuffers(states, bufReqs, buffers, status);
     return ::ndk::ScopedAStatus::ok();
@@ -241,7 +242,7 @@
     }
 
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
         mBufferRecords};
 
     camera3::returnStreamBuffers(states, buffers);
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index 33b638c..f8fdeb9 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -105,19 +105,20 @@
     };
 
     // initialize by Camera3Device.
-    explicit AidlCamera3OfflineSession(const std::string& id,
-            const sp<camera3::Camera3Stream>& inputStream,
-            const camera3::StreamSet& offlineStreamSet,
-            camera3::BufferRecords&& bufferRecords,
+    explicit AidlCamera3OfflineSession(
+            const std::string& id, const sp<camera3::Camera3Stream>& inputStream,
+            const camera3::StreamSet& offlineStreamSet, camera3::BufferRecords&& bufferRecords,
             const camera3::InFlightRequestMap& offlineReqs,
             const Camera3OfflineStates& offlineStates,
             std::shared_ptr<aidl::android::hardware::camera::device::ICameraOfflineSession>
-                    offlineSession) :
-      Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
-              offlineReqs, offlineStates),
-      mSession(offlineSession) {
-        mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
-      };
+                    offlineSession,
+            bool sensorReadoutTimestampSupported)
+        : Camera3OfflineSession(id, inputStream, offlineStreamSet, std::move(bufferRecords),
+                                offlineReqs, offlineStates),
+          mSession(offlineSession),
+          mSensorReadoutTimestampSupported(sensorReadoutTimestampSupported) {
+            mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
+    }
 
     /**
      * End of CameraOfflineSessionBase interface
@@ -130,6 +131,8 @@
 
     std::shared_ptr<AidlCameraDeviceCallbacks> mCallbacks;
 
+    bool mSensorReadoutTimestampSupported;
+
     virtual void closeSessionLocked() override;
 
     virtual void releaseSessionLocked() override;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 74d4230..d9c8e57 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -67,7 +67,8 @@
 }
 
 void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg) {
+            const aidl::android::hardware::camera::device::NotifyMsg& msg,
+            bool hasReadoutTimestamp) {
 
     using ErrorCode = aidl::android::hardware::camera::device::ErrorCode;
     using Tag = aidl::android::hardware::camera::device::NotifyMsg::Tag;
@@ -110,8 +111,9 @@
             m.type = CAMERA_MSG_SHUTTER;
             m.message.shutter.frame_number = msg.get<Tag::shutter>().frameNumber;
             m.message.shutter.timestamp = msg.get<Tag::shutter>().timestamp;
-            m.message.shutter.readout_timestamp_valid = true;
-            m.message.shutter.readout_timestamp = msg.get<Tag::shutter>().readoutTimestamp;
+            m.message.shutter.readout_timestamp_valid = hasReadoutTimestamp;
+            m.message.shutter.readout_timestamp =
+                    hasReadoutTimestamp ? msg.get<Tag::shutter>().readoutTimestamp : 0LL;
             break;
     }
     notify(states, &m);
@@ -143,12 +145,6 @@
     std::lock_guard<std::mutex> lock(states.reqBufferLock);
     std::vector<StreamBufferRet> bufRets;
     outBuffers->clear();
-    if (!states.useHalBufManager) {
-        ALOGE("%s: Camera %s does not support HAL buffer management",
-                __FUNCTION__, states.cameraId.c_str());
-        *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
-        return;
-    }
 
     SortedVector<int32_t> streamIds;
     ssize_t sz = streamIds.setCapacity(bufReqs.size());
@@ -174,6 +170,13 @@
             *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
             return;
         }
+        if (!states.useHalBufManager &&
+                !contains(states.halBufManagedStreamIds, bufReq.streamId)) {
+            ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
+                  __FUNCTION__, states.cameraId.c_str(), bufReq.streamId);
+            *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
+            return;
+        }
         streamIds.add(bufReq.streamId);
     }
 
@@ -316,10 +319,15 @@
                 sb.acquire_fence = -1;
                 sb.status = CAMERA_BUFFER_STATUS_ERROR;
             }
-            returnOutputBuffers(states.useHalBufManager, nullptr,
-                    streamBuffers.data(), numAllocatedBuffers, 0,
-                    0, false,
-                    0, states.sessionStatsBuilder);
+            std::vector<BufferToReturn> returnableBuffers{};
+            collectReturnableOutputBuffers(states.useHalBufManager, states.halBufManagedStreamIds,
+                    /*listener*/ nullptr,
+                    streamBuffers.data(), numAllocatedBuffers, /*timestamp*/ 0,
+                    /*readoutTimestamp*/ 0, /*requested*/ false,
+                    /*requestTimeNs*/ 0, states.sessionStatsBuilder,
+                    /*out*/ &returnableBuffers);
+            finishReturningOutputBuffers(returnableBuffers, /*listener*/ nullptr,
+                    states.sessionStatsBuilder);
             for (auto buf : newBuffers) {
                 states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
             }
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
index e795624..d3a8ede 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.h
@@ -79,11 +79,8 @@
                     &physicalCameraMetadata);
 
     void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg,
-        bool hasReadoutTime, uint64_t readoutTime);
-
-    void notify(CaptureOutputStates& states,
-        const aidl::android::hardware::camera::device::NotifyMsg& msg);
+            const aidl::android::hardware::camera::device::NotifyMsg& msg,
+            bool hasReadoutTimestamp);
 
     void requestStreamBuffers(RequestBufferStates& states,
         const std::vector<aidl::android::hardware::camera::device::BufferRequest>& bufReqs,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 4488067..f2e618f 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -47,6 +47,7 @@
 #include <utils/Timers.h>
 #include <cutils/properties.h>
 #include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
 
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
@@ -66,6 +67,7 @@
 using namespace android::hardware::camera;
 using namespace android::hardware::camera::device::V3_2;
 using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -307,7 +309,8 @@
         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
         requestStreamBuffers_cb _hidl_cb) {
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder,
         *this, *mInterface, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -316,7 +319,8 @@
 hardware::Return<void> HidlCamera3Device::returnStreamBuffers(
         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, *mInterface};
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams,
+        mSessionStatsBuilder, *mInterface};
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
 }
@@ -362,7 +366,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -425,7 +429,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -473,7 +477,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -641,14 +645,14 @@
     // TODO: check if we need to lock before copying states
     //       though technically no other thread should be talking to Camera3Device at this point
     Camera3OfflineStates offlineStates(
-            mTagMonitor, mVendorTagId, mUseHalBufManager, mNeedFixupMonochromeTags,
-            mUsePartialResult, mNumPartialResults, mLastCompletedRegularFrameNumber,
-            mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
-            mNextResultFrameNumber, mNextReprocessResultFrameNumber,
-            mNextZslStillResultFrameNumber, mNextShutterFrameNumber,
-            mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
-            mDeviceInfo, mPhysicalDeviceInfoMap, mDistortionMappers,
-            mZoomRatioMappers, mRotateAndCropMappers);
+            mTagMonitor, mVendorTagId, mUseHalBufManager, mHalBufManagedStreamIds,
+            mNeedFixupMonochromeTags, mUsePartialResult, mNumPartialResults,
+            mLastCompletedRegularFrameNumber, mLastCompletedReprocessFrameNumber,
+            mLastCompletedZslFrameNumber, mNextResultFrameNumber,
+            mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
+            mNextShutterFrameNumber, mNextReprocessShutterFrameNumber,
+            mNextZslStillShutterFrameNumber, mDeviceInfo, mPhysicalDeviceInfoMap,
+            mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers);
 
     *session = new HidlCamera3OfflineSession(mId, inputStream, offlineStreamSet,
             std::move(bufferRecords), offlineReqs, offlineStates, offlineSession);
@@ -716,7 +720,8 @@
                 bool overrideToPortrait,
                 bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait, supportSettingsOverride);
+                useHalBufManager, supportCameraMute, overrideToPortrait,
+                supportSettingsOverride);
 };
 
 sp<Camera3Device::Camera3DeviceInjectionMethods>
@@ -909,6 +914,7 @@
     requestedConfiguration3_2.streams.resize(config->num_streams);
     requestedConfiguration3_4.streams.resize(config->num_streams);
     requestedConfiguration3_7.streams.resize(config->num_streams);
+    mHalBufManagedStreamIds.clear();
     for (size_t i = 0; i < config->num_streams; i++) {
         device::V3_2::Stream &dst3_2 = requestedConfiguration3_2.streams[i];
         device::V3_4::Stream &dst3_4 = requestedConfiguration3_4.streams[i];
@@ -922,6 +928,9 @@
         switch (src->stream_type) {
             case CAMERA_STREAM_OUTPUT:
                 streamType = StreamType::OUTPUT;
+                if (flags::session_hal_buf_manager() && mUseHalBufManager) {
+                    mHalBufManagedStreamIds.insert(streamId);
+                }
                 break;
             case CAMERA_STREAM_INPUT:
                 streamType = StreamType::INPUT;
@@ -931,6 +940,7 @@
                         __FUNCTION__, streamId, config->streams[i]->stream_type);
                 return BAD_VALUE;
         }
+
         dst3_2.id = streamId;
         dst3_2.streamType = streamType;
         dst3_2.width = src->width;
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 350b072..f11db5d 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -33,10 +33,11 @@
 
     explicit HidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
         const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient = false) :
-        Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
-                legacyClient) { }
+        Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
+                overrideForPerfClass, overrideToPortrait, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index e328ef6..aa4b762 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -103,7 +103,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -145,7 +145,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -182,7 +182,7 @@
         mNextReprocessShutterFrameNumber, mNextZslStillShutterFrameNumber,
         mNextResultFrameNumber,
         mNextReprocessResultFrameNumber, mNextZslStillResultFrameNumber,
-        mUseHalBufManager, mUsePartialResult, mNeedFixupMonochromeTags,
+        mUseHalBufManager, mHalBufManagedStreamIds, mUsePartialResult, mNeedFixupMonochromeTags,
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
@@ -207,7 +207,8 @@
     }
 
     RequestBufferStates states {
-        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder,
+        mId, mRequestBufferInterfaceLock, mUseHalBufManager, mHalBufManagedStreamIds,
+        mOutputStreams, mSessionStatsBuilder,
         *this, mBufferRecords, *this};
     camera3::requestStreamBuffers(states, bufReqs, _hidl_cb);
     return hardware::Void();
@@ -224,7 +225,8 @@
     }
 
     ReturnBufferStates states {
-        mId, mUseHalBufManager, mOutputStreams, mSessionStatsBuilder, mBufferRecords};
+        mId, mUseHalBufManager, mHalBufManagedStreamIds, mOutputStreams, mSessionStatsBuilder,
+        mBufferRecords};
 
     camera3::returnStreamBuffers(states, buffers);
     return hardware::Void();
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index 2b81224..d28c7ab 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -16,6 +16,7 @@
 
 #include <hidl/AidlCameraServiceListener.h>
 #include <hidl/Utils.h>
+#include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
 namespace android {
@@ -29,7 +30,10 @@
 typedef frameworks::cameraservice::service::V2_1::ICameraServiceListener HCameraServiceListener2_1;
 
 binder::Status H2BCameraServiceListener::onStatusChanged(
-    int32_t status, const std::string& cameraId) {
+    int32_t status, const std::string& cameraId, int32_t deviceId) {
+  if (deviceId != kDefaultDeviceId) {
+      return binder::Status::ok();
+  }
   HCameraDeviceStatus hCameraDeviceStatus = convertToHidlCameraDeviceStatus(status);
   CameraStatusAndId cameraStatusAndId;
   cameraStatusAndId.deviceStatus = hCameraDeviceStatus;
@@ -44,7 +48,10 @@
 
 binder::Status H2BCameraServiceListener::onPhysicalCameraStatusChanged(
     int32_t status, const std::string& cameraId,
-    const std::string& physicalCameraId) {
+    const std::string& physicalCameraId, int32_t deviceId) {
+  if (deviceId != kDefaultDeviceId) {
+      return binder::Status::ok();
+  }
   auto cast2_1 = HCameraServiceListener2_1::castFrom(mBase);
   sp<HCameraServiceListener2_1> interface2_1 = nullptr;
   if (cast2_1.isOk()) {
@@ -66,13 +73,13 @@
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStatusChanged(
-    int32_t, const std::string&) {
+    [[maybe_unused]] int32_t, [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStrengthLevelChanged(
-    const std::string&, int32_t) {
+    [[maybe_unused]] const std::string&, [[maybe_unused]] int32_t, [[maybe_unused]] int32_t) {
   return binder::Status::ok();
 }
 
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 91a4c16..78fca4e 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -47,25 +47,28 @@
     ~H2BCameraServiceListener() { }
 
     virtual ::android::binder::Status onStatusChanged(int32_t status,
-            const std::string& cameraId) override;
+            const std::string& cameraId, int32_t deviceId) override;
     virtual ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
             const std::string& cameraId,
-            const std::string& physicalCameraId) override;
+            const std::string& physicalCameraId,
+            int32_t deviceId) override;
 
     virtual ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const std::string& cameraId) override;
+            int32_t status, const std::string& cameraId, int32_t deviceId) override;
     virtual ::android::binder::Status onTorchStrengthLevelChanged(
-            const std::string& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel, int32_t deviceId) override;
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageId*/) {
+    virtual binder::Status onCameraOpened([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] const std::string& /*clientPackageId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // empty implementation
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed([[maybe_unused]] const std::string& /*cameraId*/,
+            [[maybe_unused]] int32_t /*deviceId*/) {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 59fc1cd..d607d10 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -25,6 +25,7 @@
 #include <hidl/Utils.h>
 #include <android/hardware/camera/device/3.2/types.h>
 #include <android-base/properties.h>
+#include <utils/Utils.h>
 
 namespace android {
 namespace frameworks {
@@ -58,7 +59,7 @@
     const sp<hardware::camera2::ICameraDeviceUser> &deviceRemote)
   : mDeviceRemote(deviceRemote) {
     mInitSuccess = initDevice();
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
 }
 
 bool HidlCameraDeviceUser::initDevice() {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 94bf653..8f25ad6 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -25,6 +25,9 @@
 
 #include <hidl/HidlTransportSupport.h>
 
+#include <camera/CameraUtils.h>
+#include <utils/Utils.h>
+
 namespace android {
 namespace frameworks {
 namespace cameraservice {
@@ -56,8 +59,8 @@
 }
 
 HidlCameraService::HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) {
-    mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
-};
+    mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
+}
 
 Return<void>
 HidlCameraService::getCameraCharacteristics(const hidl_string& cameraId,
@@ -67,7 +70,7 @@
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                &cameraMetadata);
+                kDefaultDeviceId, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -119,7 +122,7 @@
             callbacks, cameraId, std::string(), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-            /*out*/&deviceRemote);
+            kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index 9dd657c..c710671 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -57,6 +57,7 @@
         "android.hardware.camera.device@3.5",
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
+        "camera_platform_flags_c_lib",
     ],
     fuzz_config: {
         cc: [
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 854c342..778b428 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -27,6 +27,7 @@
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -147,7 +148,7 @@
         mAutoFocusMessage = true;
         mAutoFocusCondition.broadcast();
     }
-};
+}
 
 void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
                                 camera_frame_metadata_t *) {
@@ -169,7 +170,7 @@
         default:
             break;
     }
-};
+}
 
 status_t CameraFuzzer::waitForPreviewStart() {
     status_t rc = NO_ERROR;
@@ -215,7 +216,7 @@
     } else {
         camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
     }
-    mCameraService->getNumberOfCameras(camType, &mNumCameras);
+    mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -235,11 +236,13 @@
     mCameraService->getCameraVendorTagCache(&cache);
 
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, &cameraInfo);
+    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+            /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+            kDefaultDeviceId, /*devicePolicy*/0, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -321,12 +324,13 @@
     std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
-    mCameraService->setTorchMode(cameraIdStr, true, binder);
+    mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
     ALOGV("Turned torch on.");
     int32_t torchStrength = rand() % 5 + 1;
     ALOGV("Changing torch strength level to %d", torchStrength);
-    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder);
-    mCameraService->setTorchMode(cameraIdStr, false, binder);
+    mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
+            kDefaultDeviceId, /*devicePolicy*/0);
+    mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
     ALOGV("Turned torch off.");
 }
 
@@ -347,7 +351,7 @@
                                  android::CameraService::USE_CALLING_PID,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                  /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
-                                 &cameraDevice);
+                                 kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
@@ -484,20 +488,22 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t, const std::string&) {
+    virtual binder::Status onStatusChanged(int32_t /*status*/, const std::string& /*cameraId*/,
+            int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+            int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/) {
+            const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // No op
@@ -505,18 +511,18 @@
     }
 
     virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
     virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) {
+            int32_t /*torchStrength*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
@@ -583,7 +589,7 @@
         mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
-                &device);
+                kDefaultDeviceId, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
         }
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5072edd..55e2c9d 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -18,6 +18,49 @@
     ],
 }
 
+cc_defaults {
+    name: "cameraservice_test_hostsupported",
+
+    // All test sources that can run on both host and device
+    // should be listed here
+    srcs: [
+        "ClientManagerTest.cpp",
+        "DepthProcessorTest.cpp",
+        "DistortionMapperTest.cpp",
+        "ExifUtilsTest.cpp",
+        "NV12Compressor.cpp",
+        "RotateAndCropMapperTest.cpp",
+        "SessionStatsBuilderTest.cpp",
+        "ZoomRatioTest.cpp",
+    ],
+
+    // All shared libs available on both host and device
+    // should be listed here
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libexif",
+        "libjpeg",
+        "liblog",
+        "libutils",
+        "camera_platform_flags_c_lib",
+    ],
+
+    static_libs: [
+        "libgmock",
+    ],
+
+    cflags: [
+        "-Wall",
+        "-Wextra",
+        "-Werror",
+    ],
+
+    test_suites: ["device-tests"],
+
+}
+
 cc_test {
     name: "cameraservice_test",
 
@@ -33,20 +76,15 @@
 
     defaults: [
         "libcameraservice_deps",
+        "cameraservice_test_hostsupported",
     ],
 
+    // Only include libs that can't be run host-side here
     shared_libs: [
-        "libbase",
-        "libbinder",
         "libcutils",
         "libhidlbase",
-        "liblog",
         "libcamera_client",
-        "libcamera_metadata",
         "libui",
-        "libutils",
-        "libjpeg",
-        "libexif",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
         "android.hardware.camera.device@1.0",
@@ -57,6 +95,7 @@
         "camera_platform_flags_c_lib",
     ],
 
+    // Only include libs that can't be run host-side here
     static_libs: [
         "android.hardware.camera.provider@2.4",
         "android.hardware.camera.provider@2.5",
@@ -64,76 +103,40 @@
         "android.hardware.camera.provider@2.7",
         "android.hardware.camera.provider-V3-ndk",
         "libcameraservice",
-        "libgmock",
         "libflagtest",
     ],
 
+    // Only include sources that can't be run host-side here
     srcs: [
         "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
-        "ClientManagerTest.cpp",
-        "DepthProcessorTest.cpp",
-        "DistortionMapperTest.cpp",
-        "ExifUtilsTest.cpp",
-        "NV12Compressor.cpp",
-        "RotateAndCropMapperTest.cpp",
-        "ZoomRatioTest.cpp",
     ],
 
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
-    test_suites: ["device-tests"],
-
 }
 
 cc_test_host {
     name: "cameraservice_test_host",
 
+    defaults: [
+        "cameraservice_test_hostsupported"
+    ],
+
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
         "frameworks/native/libs/binder/include_activitymanager"
     ],
 
+    // Only include libs that can't be run device-side here
     shared_libs: [
         "libactivity_manager_procstate_aidl-cpp",
-        "libbase",
-        "libbinder",
-        "libcamera_metadata",
         "libdynamic_depth",
-        "libexif",
-        "libjpeg",
-        "liblog",
-        "libutils",
-        "camera_platform_flags_c_lib",
     ],
 
+    // Only include libs that can't be run device-side here
     static_libs: [
         "libcamera_client_host",
         "libcameraservice_device_independent",
-        "libgmock",
     ],
 
-    srcs: [
-        "ClientManagerTest.cpp",
-        "DepthProcessorTest.cpp",
-        "DistortionMapperTest.cpp",
-        "ExifUtilsTest.cpp",
-        "NV12Compressor.cpp",
-        "RotateAndCropMapperTest.cpp",
-        "ZoomRatioTest.cpp",
-    ],
-
-    cflags: [
-        "-Wall",
-        "-Wextra",
-        "-Werror",
-    ],
-
-    test_suites: ["device-tests"],
-
 }
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index db43a02..30b4691 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -21,6 +21,8 @@
 
 #include <private/android_filesystem_config.h>
 
+#include <camera/CameraUtils.h>
+
 #include "../CameraService.h"
 #include "../utils/CameraServiceProxyWrapper.h"
 
@@ -35,22 +37,23 @@
 // Empty service listener.
 class TestCameraServiceListener : public hardware::BnCameraServiceListener {
 public:
-    virtual ~TestCameraServiceListener() {};
+    virtual ~TestCameraServiceListener() {}
 
-    virtual binder::Status onStatusChanged(int32_t , const std::string&) {
+    virtual binder::Status onStatusChanged(int32_t , const std::string&, int32_t) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/,
+            int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
-            const std::string& /*cameraId*/) {
+            const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         return binder::Status::ok();
-    };
+    }
 
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // No op
@@ -58,18 +61,18 @@
     }
 
     virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
-            const std::string& /*clientPackageName*/) {
+            const std::string& /*clientPackageName*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
 
     virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
-            int32_t /*torchStrength*/) {
+            int32_t /*torchStrength*/, int32_t /*deviceId*/) {
         // No op
         return binder::Status::ok();
     }
@@ -227,7 +230,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -241,7 +245,8 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -260,14 +265,16 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -288,14 +295,16 @@
         binder::Status status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
                 sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
+                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index a53d26d..939126c 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -372,28 +372,22 @@
 };
 
 /**
- * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
+ * Simple test version of HidlServiceInteractionProxy, used to inject onRegistered calls to the
  * CameraProviderManager
  */
-struct TestInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy,
-                              public CameraProviderManager::AidlServiceInteractionProxy {
+struct TestHidlInteractionProxy : public CameraProviderManager::HidlServiceInteractionProxy {
     sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
     sp<TestICameraProvider> mTestCameraProvider;
-    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
 
-    TestInteractionProxy() {}
+    TestHidlInteractionProxy() {}
 
     void setProvider(sp<TestICameraProvider> provider) {
         mTestCameraProvider = provider;
     }
 
-    void setAidlProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
-        mTestAidlCameraProvider = provider;
-    }
-
     std::vector<std::string> mLastRequestedServiceNames;
 
-    virtual ~TestInteractionProxy() {}
+    virtual ~TestHidlInteractionProxy() {}
 
     virtual bool registerForNotifications(
             [[maybe_unused]] const std::string &serviceName,
@@ -430,9 +424,47 @@
         hardware::hidl_vec<hardware::hidl_string> ret = {"test/0"};
         return ret;
     }
+};
+
+/**
+ * Simple test version of AidlServiceInteractionProxy, used to inject onRegistered calls to the
+ * CameraProviderManager
+ */
+struct TestAidlInteractionProxy : public CameraProviderManager::AidlServiceInteractionProxy {
+    std::shared_ptr<TestAidlICameraProvider> mTestAidlCameraProvider;
+
+    TestAidlInteractionProxy() {}
+
+    void setProvider(std::shared_ptr<TestAidlICameraProvider> provider) {
+        mTestAidlCameraProvider = provider;
+    }
+
+    std::vector<std::string> mLastRequestedServiceNames;
+
+    virtual ~TestAidlInteractionProxy() {}
 
     virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
-    getAidlService(const std::string&) {
+            getService(const std::string& serviceName) override {
+        if (!flags::delay_lazy_hal_instantiation()) {
+            return mTestAidlCameraProvider;
+        }
+
+        // If no provider has been given, fail; in reality, getService would
+        // block for HALs that don't start correctly, so we should never use
+        // getService when we don't have a valid HAL running
+        if (mTestAidlCameraProvider == nullptr) {
+            ADD_FAILURE() << __FUNCTION__ << " called with no valid provider;"
+                          << " would block indefinitely";
+            // Real getService would block, but that's bad in unit tests. So
+            // just record an error and return nullptr
+            return nullptr;
+        }
+        mLastRequestedServiceNames.push_back(serviceName);
+        return mTestAidlCameraProvider;
+    }
+
+    virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
+    tryGetService(const std::string&) override {
         return mTestAidlCameraProvider;
     }
 };
@@ -462,7 +494,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -510,7 +542,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -560,7 +592,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
@@ -696,7 +728,7 @@
     status_t res;
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
     serviceProxy.setProvider(provider);
@@ -730,7 +762,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -779,7 +811,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
     sp<TestICameraProvider> provider =  new TestICameraProvider(deviceNames,
             vendorSection);
 
@@ -821,7 +853,7 @@
 
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestHidlInteractionProxy serviceProxy;
 
     android::hardware::hidl_vec<uint8_t> chars;
     CameraMetadata meta;
@@ -857,9 +889,11 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
@@ -868,7 +902,7 @@
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
     ndk::SpAIBinder spBinder = aidlProvider->asBinder();
     AIBinder* aiBinder = spBinder.get();
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
     providerManager->onServiceRegistration(
             String16("android.hardware.camera.provider.ICameraProvider/virtual/0"),
             AIBinder_toPlatformBinder(aiBinder));
@@ -883,15 +917,17 @@
                 REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(vd_flags, virtual_camera_service_discovery))) {
     sp<CameraProviderManager> providerManager = new CameraProviderManager();
     sp<TestStatusListener> statusListener = new TestStatusListener();
-    TestInteractionProxy serviceProxy;
+    TestAidlInteractionProxy aidlServiceProxy;
+    TestHidlInteractionProxy hidlServiceProxy;
 
     std::vector<std::string> cameraList = {"device@1.1/virtual/123"};
 
     std::shared_ptr<TestAidlICameraProvider> aidlProvider =
             ndk::SharedRefBase::make<TestAidlICameraProvider>(cameraList);
-    serviceProxy.setAidlProvider(aidlProvider);
+    aidlServiceProxy.setProvider(aidlProvider);
 
-    status_t res = providerManager->initialize(statusListener, &serviceProxy, &serviceProxy);
+    status_t res = providerManager->initialize(statusListener,
+                                               &hidlServiceProxy, &aidlServiceProxy);
     ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
 
     std::unordered_map<std::string, std::set<std::string>> unavailableDeviceIds;
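
Splitting the old combined TestInteractionProxy into separate HIDL and AIDL proxies mirrors the production split between HidlServiceInteractionProxy and AidlServiceInteractionProxy, where getService() may block until a lazy HAL comes up while tryGetService() returns immediately, possibly with no provider. A hypothetical, self-contained sketch of that shape (all names and types here are illustrative, not the AOSP interfaces):

    #include <memory>
    #include <string>
    #include <vector>

    // Hypothetical provider type standing in for ICameraProvider.
    struct FakeProvider {};

    // Simplified service-interaction interface: getService() is expected to block
    // until a lazy HAL is up; tryGetService() returns immediately, possibly null.
    struct ServiceInteraction {
        virtual ~ServiceInteraction() = default;
        virtual std::shared_ptr<FakeProvider> getService(const std::string& name) = 0;
        virtual std::shared_ptr<FakeProvider> tryGetService(const std::string& name) = 0;
    };

    // Test double: records requested names and returns whatever the test injected.
    struct TestServiceInteraction : ServiceInteraction {
        std::shared_ptr<FakeProvider> provider;
        std::vector<std::string> requestedNames;

        std::shared_ptr<FakeProvider> getService(const std::string& name) override {
            requestedNames.push_back(name);
            return provider;  // a real implementation would block instead of returning null
        }
        std::shared_ptr<FakeProvider> tryGetService(const std::string& name) override {
            requestedNames.push_back(name);
            return provider;  // may be null; callers must handle that
        }
    };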
diff --git a/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
new file mode 100644
index 0000000..3644358
--- /dev/null
+++ b/services/camera/libcameraservice/tests/SessionStatsBuilderTest.cpp
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SessionStatsBuilderTest"
+
+#include <gtest/gtest.h>
+#include <utils/Errors.h>
+
+#include "../utils/SessionStatsBuilder.h"
+
+using namespace std;
+using namespace android;
+
+TEST(SessionStatsBuilderTest, FpsHistogramTest) {
+    SessionStatsBuilder b{};
+
+    int64_t requestCount, resultErrorCount;
+    bool deviceError;
+    pair<int32_t, int32_t> mostRequestedFpsRange;
+    map<int, StreamStats> streamStatsMap;
+
+    // Verify we get the most common FPS
+    int64_t fc = 0;
+    for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(30, 30, fc);
+    for (size_t i = 0; i < 15; i++, fc++) b.incFpsRequestedCount(15, 30, fc);
+    for (size_t i = 0; i < 20; i++, fc++) b.incFpsRequestedCount(15, 15, fc);
+    for (size_t i = 0; i < 10; i++, fc++) b.incFpsRequestedCount(60, 60, fc);
+
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(15, 15)) << "Incorrect most common FPS selected";
+
+    // Verify empty stats behavior
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(0, 0)) << "Incorrect empty stats FPS reported";
+
+    // Verify one frame behavior
+    b.incFpsRequestedCount(30, 30, 1);
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(30, 30)) << "Incorrect single-frame FPS reported";
+
+    // Verify overflow stats behavior
+    fc = 0;
+    for (size_t range = 1; range < SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE + 2; range++) {
+        int count = SessionStatsBuilder::FPS_HISTOGRAM_MAX_SIZE * 3;
+        for (size_t i = 0; i < count - range; i++, fc++) b.incFpsRequestedCount(range, range, fc);
+    }
+    // Should have the oldest bucket dropped, so second oldest should be most common
+    b.buildAndReset(&requestCount, &resultErrorCount,
+        &deviceError, &mostRequestedFpsRange, &streamStatsMap);
+    ASSERT_EQ(mostRequestedFpsRange, make_pair(2, 2)) << "Incorrect stats overflow behavior";
+
+}
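
The overflow case above relies on the FPS histogram being bounded: once the builder tracks its maximum number of distinct FPS ranges, the oldest bucket is dropped to make room, which is why the second-oldest range ends up as the most requested one. A hypothetical sketch of that eviction behavior (not the SessionStatsBuilder implementation; kMaxBuckets and all names here are assumptions for illustration):

    #include <cstddef>
    #include <cstdint>
    #include <map>
    #include <utility>

    class BoundedFpsHistogram {
      public:
        static constexpr std::size_t kMaxBuckets = 10;  // assumed limit, illustration only

        void increment(int32_t minFps, int32_t maxFps) {
            auto key = std::make_pair(minFps, maxFps);
            auto it = mCounts.find(key);
            if (it == mCounts.end()) {
                if (mCounts.size() == kMaxBuckets) {
                    // Full: evict the bucket that was inserted earliest.
                    auto oldest = mCounts.begin();
                    for (auto cur = mCounts.begin(); cur != mCounts.end(); ++cur) {
                        if (cur->second.order < oldest->second.order) oldest = cur;
                    }
                    mCounts.erase(oldest);
                }
                it = mCounts.emplace(key, Entry{0, mNextOrder++}).first;
            }
            it->second.count++;
        }

        std::pair<int32_t, int32_t> mostRequested() const {
            std::pair<int32_t, int32_t> best{0, 0};  // empty histogram reports [0,0]
            int64_t bestCount = 0;
            for (const auto& [range, entry] : mCounts) {
                if (entry.count > bestCount) { bestCount = entry.count; best = range; }
            }
            return best;
        }

      private:
        struct Entry { int64_t count; uint64_t order; };
        std::map<std::pair<int32_t, int32_t>, Entry> mCounts;
        uint64_t mNextOrder = 0;
    };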
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
new file mode 100644
index 0000000..93b440b
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AttributionAndPermissionUtils.h"
+
+#include <binder/AppOpsManager.h>
+#include <binder/PermissionController.h>
+#include <com_android_internal_camera_flags.h>
+#include <cutils/properties.h>
+#include <private/android_filesystem_config.h>
+
+#include "CameraService.h"
+
+#include <binder/IPCThreadState.h>
+#include <hwbinder/IPCThreadState.h>
+#include <binderthreadstate/CallerUtils.h>
+
+namespace android {
+
+namespace flags = com::android::internal::camera::flags;
+
+const std::string AttributionAndPermissionUtils::sDumpPermission("android.permission.DUMP");
+const std::string AttributionAndPermissionUtils::sManageCameraPermission(
+        "android.permission.MANAGE_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraPermission(
+        "android.permission.CAMERA");
+const std::string AttributionAndPermissionUtils::sSystemCameraPermission(
+        "android.permission.SYSTEM_CAMERA");
+const std::string AttributionAndPermissionUtils::sCameraHeadlessSystemUserPermission(
+        "android.permission.CAMERA_HEADLESS_SYSTEM_USER");
+const std::string AttributionAndPermissionUtils::sCameraPrivacyAllowlistPermission(
+        "android.permission.CAMERA_PRIVACY_ALLOWLIST");
+const std::string AttributionAndPermissionUtils::sCameraSendSystemEventsPermission(
+        "android.permission.CAMERA_SEND_SYSTEM_EVENTS");
+const std::string AttributionAndPermissionUtils::sCameraOpenCloseListenerPermission(
+        "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
+const std::string AttributionAndPermissionUtils::sCameraInjectExternalCameraPermission(
+        "android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
+
+int AttributionAndPermissionUtils::getCallingUid() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->getCallingUid();
+    }
+    return IPCThreadState::self()->getCallingUid();
+}
+
+int AttributionAndPermissionUtils::getCallingPid() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->getCallingPid();
+    }
+    return IPCThreadState::self()->getCallingPid();
+}
+
+int64_t AttributionAndPermissionUtils::clearCallingIdentity() {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        return hardware::IPCThreadState::self()->clearCallingIdentity();
+    }
+    return IPCThreadState::self()->clearCallingIdentity();
+}
+
+void AttributionAndPermissionUtils::restoreCallingIdentity(int64_t token) {
+    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
+        hardware::IPCThreadState::self()->restoreCallingIdentity(token);
+    } else {
+        IPCThreadState::self()->restoreCallingIdentity(token);
+    }
+    return;
+}
+
+bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
+        const AttributionSourceState &attributionSource) {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cameraId is empty, this check is not being used to access a specific
+        // camera, so grant the permission to the automotive privileged client
+        // based on uid alone.
+        if (cameraId.empty())
+            return true;
+
+        auto cameraService = mCameraService.promote();
+        if (cameraService == nullptr) {
+            ALOGE("%s: CameraService unavailable.", __FUNCTION__);
+            return false;
+        }
+
+        // If this call is used to access a specific camera, then cameraId must be provided.
+        // In that case, pre-grant the permission only for accessing the exterior system
+        // camera.
+        return cameraService->isAutomotiveExteriorSystemCamera(cameraId);
+    }
+
+    return false;
+}
+
+bool AttributionAndPermissionUtils::checkPermissionForPreflight(const std::string &cameraId,
+        const std::string &permission, const AttributionSourceState &attributionSource,
+        const std::string& message, int32_t attributedOpCode) {
+    if (checkAutomotivePrivilegedClient(cameraId, attributionSource)) {
+        return true;
+    }
+
+    if (!flags::cache_permission_services()) {
+        PermissionChecker permissionChecker;
+        return permissionChecker.checkPermissionForPreflight(
+                       toString16(permission), attributionSource, toString16(message),
+                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+    } else {
+        return mPermissionChecker->checkPermissionForPreflight(
+                       toString16(permission), attributionSource, toString16(message),
+                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
+    }
+}
+
+// Can camera service trust the caller based on the calling UID?
+bool AttributionAndPermissionUtils::isTrustedCallingUid(uid_t uid) {
+    switch (uid) {
+        case AID_MEDIA:        // mediaserver
+        case AID_CAMERASERVER: // cameraserver
+        case AID_RADIO:        // telephony
+            return true;
+        default:
+            return false;
+    }
+}
+
+bool AttributionAndPermissionUtils::isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isHeadlessSystemUserMode() {
+    // Checks if the device is running in headless system user mode
+    // by checking the property ro.fw.mu.headless_system_user.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.fw.mu.headless_system_user", value, "");
+    return strncmp(value, "true", PROPERTY_VALUE_MAX) == 0;
+}
+
+bool AttributionAndPermissionUtils::isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS which is a
+    // privileged client uid used for safety critical use cases such as
+    // rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
+        int userId, /*inout*/uid_t& uid, int err) {
+    PermissionController pc;
+    uid = pc.getPackageUid(toString16(packageName), 0);
+    if (uid <= 0) {
+        ALOGE("Unknown package: '%s'", packageName.c_str());
+        dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
+        return BAD_VALUE;
+    }
+
+    if (userId < 0) {
+        ALOGE("Invalid user: %d", userId);
+        dprintf(err, "Invalid user: %d\n", userId);
+        return BAD_VALUE;
+    }
+
+    uid = multiuser_get_uid(userId, uid);
+    return NO_ERROR;
+}
+
+bool AttributionAndPermissionUtils::isCallerCameraServerNotDelegating() {
+    return (getCallingPid() == getpid());
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCamera(const std::string& cameraId,
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(cameraId, sCameraPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForSystemCamera(const std::string& cameraId,
+        const AttributionSourceState& attributionSource, bool checkCameraPermissions) {
+    bool systemCameraPermission = checkPermissionForPreflight(cameraId,
+            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+    return systemCameraPermission && (!checkCameraPermissions
+            || hasPermissionsForCamera(cameraId, attributionSource));
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraHeadlessSystemUser(
+        const std::string& cameraId, const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(cameraId, sCameraHeadlessSystemUserPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForCameraPrivacyAllowlist(
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(std::string(), sCameraPrivacyAllowlistPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+bool AttributionAndPermissionUtils::hasPermissionsForOpenCloseListener(
+        const AttributionSourceState& attributionSource) {
+    return checkPermissionForPreflight(std::string(), sCameraOpenCloseListenerPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
+}
+
+} // namespace android
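
Because the calling-identity and permission helpers above are declared virtual, tests can subclass AttributionAndPermissionUtils and substitute fixed caller identities instead of reading real binder thread state. A minimal sketch of such a test double, assuming AttributionAndPermissionUtils.h is on the include path (the class and member names below are hypothetical):

    #include "AttributionAndPermissionUtils.h"

    // Test double: overrides the virtual identity getters so unit tests can
    // simulate arbitrary callers without touching real binder thread state.
    class FakeAttributionAndPermissionUtils : public android::AttributionAndPermissionUtils {
      public:
        FakeAttributionAndPermissionUtils(int uid, int pid) : mUid(uid), mPid(pid) {}

        int getCallingUid() override { return mUid; }
        int getCallingPid() override { return mPid; }

      private:
        int mUid;
        int mPid;
    };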
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
new file mode 100644
index 0000000..4f238ab
--- /dev/null
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+#define ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
+
+#include <android/content/AttributionSourceState.h>
+#include <android/permission/PermissionChecker.h>
+#include <binder/BinderService.h>
+#include <private/android_filesystem_config.h>
+
+namespace android {
+
+class CameraService;
+
+using content::AttributionSourceState;
+using permission::PermissionChecker;
+
+/**
+ * Utility class consolidating methods/data for verifying permissions and the identity of the
+ * caller.
+ */
+class AttributionAndPermissionUtils {
+  public:
+    AttributionAndPermissionUtils() { }
+    virtual ~AttributionAndPermissionUtils() {}
+
+    void setCameraService(wp<CameraService> cameraService) {
+        mCameraService = cameraService;
+    }
+
+    // Utilities handling Binder calling identities (previously in CameraThreadState)
+    virtual int getCallingUid();
+    virtual int getCallingPid();
+    virtual int64_t clearCallingIdentity();
+    virtual void restoreCallingIdentity(int64_t token);
+
+    /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client; otherwise uses the system service permission checker to check
+     * for the appropriate permission. If this function is called to access a specific
+     * camera, then the cameraId must not be empty. The cameraId is used only for the
+     * automotive privileged client, so that the permission is pre-granted only for system
+     * cameras located outside of the vehicle body frame; a camera inside the vehicle
+     * cabin would still need user permission.
+     */
+    virtual bool checkPermissionForPreflight(const std::string &cameraId,
+            const std::string &permission, const AttributionSourceState& attributionSource,
+            const std::string& message, int32_t attributedOpCode);
+
+    // Can camera service trust the caller based on the calling UID?
+    virtual bool isTrustedCallingUid(uid_t uid);
+
+    virtual bool isAutomotiveDevice();
+    virtual bool isHeadlessSystemUserMode();
+
+    /**
+     * Returns true if the client has uid AID_AUTOMOTIVE_EVS and the device is an automotive device.
+     */
+    virtual bool isAutomotivePrivilegedClient(int32_t uid);
+
+    virtual status_t getUidForPackage(const std::string &packageName, int userId,
+            /*inout*/uid_t& uid, int err);
+    virtual bool isCallerCameraServerNotDelegating();
+
+    // Utils for checking specific permissions
+    virtual bool hasPermissionsForCamera(const std::string& cameraId,
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForSystemCamera(const std::string& cameraId,
+            const AttributionSourceState& attributionSource, bool checkCameraPermissions = true);
+    virtual bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId,
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForCameraPrivacyAllowlist(
+            const AttributionSourceState& attributionSource);
+    virtual bool hasPermissionsForOpenCloseListener(
+            const AttributionSourceState& attributionSource);
+
+    static const std::string sDumpPermission;
+    static const std::string sManageCameraPermission;
+    static const std::string sCameraPermission;
+    static const std::string sSystemCameraPermission;
+    static const std::string sCameraHeadlessSystemUserPermission;
+    static const std::string sCameraPrivacyAllowlistPermission;
+    static const std::string sCameraSendSystemEventsPermission;
+    static const std::string sCameraOpenCloseListenerPermission;
+    static const std::string sCameraInjectExternalCameraPermission;
+
+  protected:
+    wp<CameraService> mCameraService;
+
+    bool checkAutomotivePrivilegedClient(const std::string &cameraId,
+            const AttributionSourceState &attributionSource);
+
+  private:
+    std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
+            std::make_unique<permission::PermissionChecker>();
+};
+
+/**
+ * Class to be inherited by classes encapsulating AttributionAndPermissionUtils. Provides an
+ * additional utility layer above AttributionAndPermissionUtils calls, and avoids verbosity
+ * in the encapsulating class's methods.
+ */
+class AttributionAndPermissionUtilsEncapsulator {
+protected:
+    std::shared_ptr<AttributionAndPermissionUtils> mAttributionAndPermissionUtils;
+
+public:
+    AttributionAndPermissionUtilsEncapsulator(
+        std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils)
+            : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.deviceId = deviceId;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid,
+                deviceId);
+        attributionSource.packageName = packageName;
+        return attributionSource;
+    }
+
+    int getCallingUid() const {
+        return mAttributionAndPermissionUtils->getCallingUid();
+    }
+
+    int getCallingPid() const {
+        return mAttributionAndPermissionUtils->getCallingPid();
+    }
+
+    int64_t clearCallingIdentity() const {
+        return mAttributionAndPermissionUtils->clearCallingIdentity();
+    }
+
+    void restoreCallingIdentity(int64_t token) const {
+        mAttributionAndPermissionUtils->restoreCallingIdentity(token);
+    }
+
+    // The word 'System' here does not refer to callers only on the system
+    // partition. They just need to have an android system uid.
+    bool callerHasSystemUid() const {
+        return (getCallingUid() < AID_APP_START);
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid, int32_t deviceId) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid, deviceId);
+    }
+
+    bool hasPermissionsForCamera(int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) const {
+        return hasPermissionsForCamera(std::string(), callingPid, callingUid, packageName,
+                deviceId);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid,
+            int callingUid, int32_t deviceId) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid,
+                deviceId);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCamera(const std::string& cameraId, int callingPid, int callingUid,
+            const std::string& packageName, int32_t deviceId) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid, packageName,
+                deviceId);
+        return mAttributionAndPermissionUtils->hasPermissionsForCamera(cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid,
+            bool checkCameraPermissions = true) const  {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForSystemCamera(
+                    cameraId, attributionSource, checkCameraPermissions);
+    }
+
+    bool hasPermissionsForCameraHeadlessSystemUser(const std::string& cameraId, int callingPid,
+            int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraHeadlessSystemUser(
+                    cameraId, attributionSource);
+    }
+
+    bool hasPermissionsForCameraPrivacyAllowlist(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForCameraPrivacyAllowlist(
+                attributionSource);
+    }
+
+    bool hasPermissionsForOpenCloseListener(int callingPid, int callingUid) const {
+        auto attributionSource = buildAttributionSource(callingPid, callingUid);
+        return mAttributionAndPermissionUtils->hasPermissionsForOpenCloseListener(
+                attributionSource);
+    }
+
+    bool isAutomotiveDevice() const {
+        return mAttributionAndPermissionUtils->isAutomotiveDevice();
+    }
+
+    bool isAutomotivePrivilegedClient(int32_t uid) const {
+        return mAttributionAndPermissionUtils->isAutomotivePrivilegedClient(uid);
+    }
+
+    bool isTrustedCallingUid(uid_t uid) const {
+        return mAttributionAndPermissionUtils->isTrustedCallingUid(uid);
+    }
+
+    bool isHeadlessSystemUserMode() const {
+        return mAttributionAndPermissionUtils->isHeadlessSystemUserMode();
+    }
+
+    status_t getUidForPackage(const std::string &packageName, int userId,
+            /*inout*/uid_t& uid, int err) const {
+        return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
+    }
+
+    bool isCallerCameraServerNotDelegating() const {
+        return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
+    }
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_ATTRIBUTION_AND_PERMISSION_UTILS_H
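
A class that needs these checks can mix in AttributionAndPermissionUtilsEncapsulator rather than hand-writing the delegation. A minimal sketch, assuming the header above is available (ExampleCameraEndpoint and callerMayOpen are hypothetical names, not part of this change):

    #include <cstdint>
    #include <memory>
    #include <string>

    #include "AttributionAndPermissionUtils.h"

    // Hypothetical service-side endpoint reusing the encapsulator's helpers.
    class ExampleCameraEndpoint : public android::AttributionAndPermissionUtilsEncapsulator {
      public:
        explicit ExampleCameraEndpoint(
                std::shared_ptr<android::AttributionAndPermissionUtils> utils)
            : AttributionAndPermissionUtilsEncapsulator(utils) {}

        bool callerMayOpen(const std::string& cameraId, int32_t deviceId) const {
            // The inherited helpers resolve the caller identity and build the
            // AttributionSourceState internally.
            return hasPermissionsForCamera(cameraId, getCallingPid(), getCallingUid(), deviceId);
        }
    };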
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 65e93a9..4afae9b 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -95,7 +95,8 @@
         sp<hardware::ICameraServiceProxy>& proxyBinder,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-        bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+        bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     Mutex::Autolock l(mLock);
 
     mSessionStats.mNewCameraState = CameraSessionStats::CAMERA_STATE_IDLE;
@@ -106,6 +107,7 @@
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mUsedUltraWide = usedUltraWide;
     mSessionStats.mUsedZoomOverride = usedZoomOverride;
+    mSessionStats.mMostRequestedFpsRange = mostRequestedFpsRange;
     mSessionStats.mStreamStats = streamStats;
 
     updateProxyDeviceState(proxyBinder);
@@ -281,7 +283,8 @@
 void CameraServiceProxyWrapper::logIdle(const std::string& id,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-        bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats) {
+        bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+        const std::vector<hardware::CameraStreamStats>& streamStats) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -294,8 +297,9 @@
     }
 
     ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
-            ", userTag %s, videoStabilizationMode %d", __FUNCTION__, id.c_str(), requestCount,
-            resultErrorCount, deviceError, userTag.c_str(), videoStabilizationMode);
+            ", userTag %s, videoStabilizationMode %d, most common FPS [%d,%d]",
+            __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError, userTag.c_str(),
+            videoStabilizationMode, mostRequestedFpsRange.first, mostRequestedFpsRange.second);
     for (size_t i = 0; i < streamStats.size(); i++) {
         ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
                 PRId64 ", startTimeMs %d" ,
@@ -306,7 +310,8 @@
 
     sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     sessionStats->onIdle(proxyBinder, requestCount, resultErrorCount, deviceError, userTag,
-            videoStabilizationMode, usedUltraWide, usedZoomOverride, streamStats);
+            videoStabilizationMode, usedUltraWide, usedZoomOverride,
+            mostRequestedFpsRange, streamStats);
 }
 
 void CameraServiceProxyWrapper::logOpen(const std::string& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 49b7a8c..b6a967f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -62,7 +62,8 @@
         void onIdle(sp<hardware::ICameraServiceProxy>& proxyBinder,
                 int64_t requestCount, int64_t resultErrorCount, bool deviceError,
                 const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-                bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+                bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+                const std::vector<hardware::CameraStreamStats>& streamStats);
 
         std::string updateExtensionSessionStats(
                 const hardware::CameraExtensionSessionStats& extStats);
@@ -111,7 +112,8 @@
     void logIdle(const std::string& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode, bool usedUltraWide,
-            bool usedZoomOverride, const std::vector<hardware::CameraStreamStats>& streamStats);
+            bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
+            const std::vector<hardware::CameraStreamStats>& streamStats);
 
     // Ping camera service proxy for user update
     void pingCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.cpp b/services/camera/libcameraservice/utils/CameraThreadState.cpp
deleted file mode 100644
index 2352b80..0000000
--- a/services/camera/libcameraservice/utils/CameraThreadState.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2018 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "CameraThreadState.h"
-#include <binder/IPCThreadState.h>
-#include <hwbinder/IPCThreadState.h>
-#include <binderthreadstate/CallerUtils.h>
-#include <unistd.h>
-
-namespace android {
-
-int CameraThreadState::getCallingUid() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->getCallingUid();
-    }
-    return IPCThreadState::self()->getCallingUid();
-}
-
-int CameraThreadState::getCallingPid() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->getCallingPid();
-    }
-    return IPCThreadState::self()->getCallingPid();
-}
-
-int64_t CameraThreadState::clearCallingIdentity() {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        return hardware::IPCThreadState::self()->clearCallingIdentity();
-    }
-    return IPCThreadState::self()->clearCallingIdentity();
-}
-
-void CameraThreadState::restoreCallingIdentity(int64_t token) {
-    if (getCurrentServingCall() == BinderCallType::HWBINDER) {
-        hardware::IPCThreadState::self()->restoreCallingIdentity(token);
-    } else {
-        IPCThreadState::self()->restoreCallingIdentity(token);
-    }
-    return;
-}
-
-} // android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
index 92a1030..f3afc69 100644
--- a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -20,7 +20,6 @@
 #include <pthread.h>
 #include <sched.h>
 
-#include "CameraThreadState.h"
 #include <private/android_filesystem_config.h>
 #include <processgroup/processgroup.h>
 #include <processgroup/sched_policy.h>
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 11ef9b7..a7a2b5e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -27,6 +27,7 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "device3/ZoomRatioMapper.h"
 #include "system/graphics-base-v1.1.h"
 #include <camera/StringUtils.h>
 #include <ui/PublicFormat.h>
@@ -432,7 +433,7 @@
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
-        int32_t colorSpace) {
+        int32_t colorSpace, bool respectSurfaceSize) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
         std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +530,10 @@
         // we can use the default stream configuration map
         foundInMaxRes = true;
     }
-    // Round dimensions to the nearest dimensions available for this format
-    if (flexibleConsumer && isPublicFormat(format) &&
+    // Round dimensions to the nearest dimensions available for this format.
+    // Only do the rounding if the client doesn't ask to respect the surface
+    // size.
+    if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
             format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
             /*out*/&height)) {
@@ -684,7 +687,8 @@
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
-        bool checkSessionParams, bool *earlyExit) {
+        bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+        bool *earlyExit) {
     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     auto operatingMode = sessionConfiguration.getOperatingMode();
     binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
@@ -753,6 +757,7 @@
         const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
             it.getGraphicBufferProducers();
         bool deferredConsumer = it.isDeferred();
+        bool isConfigurationComplete = it.isComplete();
         const std::string &physicalCameraId = it.getPhysicalCameraId();
 
         int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +773,8 @@
         int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
         OutputStreamInfo streamInfo;
 
-        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
+        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
+                               isConfigurationComplete);
         if (!res.isOk()) {
             return res;
         }
@@ -781,15 +787,38 @@
         int64_t streamUseCase = it.getStreamUseCase();
         int timestampBase = it.getTimestampBase();
         int mirrorMode = it.getMirrorMode();
-        if (deferredConsumer) {
+        // Handle a deferred consumer, or a not-yet-complete configuration
+        // with no buffer producers attached.
+        if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
             streamInfo.width = it.getWidth();
             streamInfo.height = it.getHeight();
-            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
             auto surfaceType = it.getSurfaceType();
-            streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
-            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
-                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
+            switch (surfaceType) {
+                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
+                            | GraphicBuffer::USAGE_HW_COMPOSER;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
+                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
+                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
+                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+                    break;
+                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
+                    streamInfo.consumerUsage = it.getUsage();
+                    streamInfo.format = it.getFormat();
+                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
+                    break;
+                default:
+                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+                                        "Invalid surface type.");
             }
             streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
             if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -815,7 +844,8 @@
             sp<Surface> surface;
             res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                     logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
-                    streamUseCase, timestampBase, mirrorMode, colorSpace);
+                    streamUseCase, timestampBase, mirrorMode, colorSpace,
+                    /*respectSurfaceSize*/true);
 
             if (!res.isOk())
                 return res;
@@ -884,7 +914,7 @@
         CameraMetadata filteredParams;
 
         filterParameters(sessionConfiguration.getSessionParameters(), deviceInfo,
-                vendorTagId, filteredParams);
+                additionalKeys, vendorTagId, filteredParams);
 
         camera_metadata_t* metadata = const_cast<camera_metadata_t*>(filteredParams.getAndLock());
         uint8_t *metadataP = reinterpret_cast<uint8_t*>(metadata);
@@ -912,22 +942,37 @@
 }
 
 binder::Status checkSurfaceType(size_t numBufferProducers,
-        bool deferredConsumer, int surfaceType)  {
+        bool deferredConsumer, int surfaceType, bool isConfigurationComplete)  {
     if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
         ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                 __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
-    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
+    } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
         ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
     }
 
-    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
-            (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
-
-    if (deferredConsumer && !validSurfaceType) {
-        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
+    if (deferredConsumer) {
+        bool validSurfaceType = (
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+        if (!validSurfaceType) {
+            std::string msg = fmt::sprintf("Deferred target surface has invalid "
+                    "surfaceType = %d.", surfaceType);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+    } else if (!isConfigurationComplete && numBufferProducers == 0) {
+        bool validSurfaceType = (
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
+                (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
+        if (!validSurfaceType) {
+            std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
+                    "surfaceType = %d.", surfaceType);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
     }
 
     return binder::Status::ok();
@@ -1133,7 +1178,8 @@
 }
 
 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
-        metadata_vendor_id_t vendorTagId, CameraMetadata& dst) {
+        const std::vector<int32_t>& additionalTags, metadata_vendor_id_t vendorTagId,
+        CameraMetadata& dst) {
     const CameraMetadata params(src);
     camera_metadata_ro_entry_t availableSessionKeys = deviceInfo.find(
             ANDROID_REQUEST_AVAILABLE_SESSION_KEYS);
@@ -1142,9 +1188,12 @@
             filteredParams.getAndLock());
     set_camera_metadata_vendor_id(meta, vendorTagId);
     filteredParams.unlock(meta);
-    for (size_t i = 0; i < availableSessionKeys.count; i++) {
-        camera_metadata_ro_entry entry = params.find(
-                availableSessionKeys.data.i32[i]);
+
+    std::unordered_set<int32_t> filteredTags(availableSessionKeys.data.i32,
+            availableSessionKeys.data.i32 + availableSessionKeys.count);
+    filteredTags.insert(additionalTags.begin(), additionalTags.end());
+    for (int32_t tag : filteredTags) {
+        camera_metadata_ro_entry entry = params.find(tag);
         if (entry.count > 0) {
             filteredParams.update(entry);
         }
@@ -1152,6 +1201,29 @@
     dst = std::move(filteredParams);
 }
 
+status_t overrideDefaultRequestKeys(CameraMetadata *request) {
+    // Override the template request with ZoomRatioMapper
+    status_t res = ZoomRatioMapper::initZoomRatioInTemplate(request);
+    if (res != OK) {
+        ALOGE("Failed to update zoom ratio: %s (%d)", strerror(-res), res);
+        return res;
+    }
+
+    // Fill in JPEG_QUALITY if not available
+    if (!request->exists(ANDROID_JPEG_QUALITY)) {
+        static const uint8_t kDefaultJpegQuality = 95;
+        request->update(ANDROID_JPEG_QUALITY, &kDefaultJpegQuality, 1);
+    }
+
+    // Fill in AUTOFRAMING if not available
+    if (!request->exists(ANDROID_CONTROL_AUTOFRAMING)) {
+        static const uint8_t kDefaultAutoframingMode = ANDROID_CONTROL_AUTOFRAMING_OFF;
+        request->update(ANDROID_CONTROL_AUTOFRAMING, &kDefaultAutoframingMode, 1);
+    }
+
+    return OK;
+}
+
 } // namespace SessionConfigurationUtils
 } // namespace camera3
 } // namespace android
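For reference, a hypothetical usage sketch of the two helpers above; the variables requested, deviceInfo, vendorTagId and templateRequest are assumed to come from the session being configured, and only the function signatures are from this change:

    // Keep only keys listed in ANDROID_REQUEST_AVAILABLE_SESSION_KEYS or in additionalTags.
    CameraMetadata sessionParams;
    std::vector<int32_t> additionalTags = {ANDROID_CONTROL_AE_TARGET_FPS_RANGE};
    SessionConfigurationUtils::filterParameters(requested, deviceInfo,
            additionalTags, vendorTagId, sessionParams);

    // Patch a default request template with service-side defaults
    // (zoom ratio, JPEG quality, autoframing).
    status_t res = SessionConfigurationUtils::overrideDefaultRequestKeys(&templateRequest);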
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 5b2ea5c..3c0f109 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -112,7 +112,7 @@
         const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
-        int32_t colorSpace);
+        int32_t colorSpace, bool respectSurfaceSize);
 
 //check if format is 10-bit output compatible
 bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,10 +143,10 @@
 const std::string &logicalCameraId);
 
 binder::Status checkSurfaceType(size_t numBufferProducers,
-bool deferredConsumer, int surfaceType);
+        bool deferredConsumer, int surfaceType, bool isConfigurationComplete);
 
 binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const std::string &cameraId);
+        const CameraMetadata &staticInfo, const std::string &cameraId);
 
 binder::Status
 convertToHALStreamCombination(
@@ -156,7 +156,8 @@
     const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, metadata_vendor_id_t vendorTagId,
-    bool checkSessionParams, bool *earlyExit);
+    bool checkSessionParams, const std::vector<int32_t>& additionalKeys,
+    bool *earlyExit);
 
 StreamConfigurationPair getStreamConfigurationPair(const CameraMetadata &metadata);
 
@@ -177,7 +178,14 @@
         aidl::android::hardware::camera::device::RequestTemplate* tempId /*out*/);
 
 void filterParameters(const CameraMetadata& src, const CameraMetadata& deviceInfo,
-        metadata_vendor_id_t vendorTagId, CameraMetadata& dst);
+        const std::vector<int32_t>& additionalKeys, metadata_vendor_id_t vendorTagId,
+        CameraMetadata& dst);
+
+status_t overrideDefaultRequestKeys(CameraMetadata *request);
+
+template <typename T> bool contains(std::set<T> container, T value) {
+    return container.find(value) != container.end();
+}
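A quick illustration of the helper (the tag values are arbitrary examples):

    std::set<int32_t> sessionKeys = {ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
            ANDROID_CONTROL_ZOOM_RATIO};
    bool hasZoom = contains(sessionKeys, static_cast<int32_t>(ANDROID_CONTROL_ZOOM_RATIO));  // true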
 
 constexpr int32_t MAX_SURFACES_PER_STREAM = 4;
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index bf8ea84..cfa1815 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -113,7 +113,7 @@
     auto ret = convertToHALStreamCombination(sessionConfiguration, logicalCameraId, deviceInfo,
             false /*isCompositeJpegRDisabled*/, getMetadata, physicalCameraIds,
             aidlStreamConfiguration, overrideForPerfClass, vendorTagId,
-            /*checkSessionParams*/false, earlyExit);
+            /*checkSessionParams*/false, /*additionalKeys*/{}, earlyExit);
     if (!ret.isOk()) {
         return ret;
     }
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index c3aac72..2bca4cb 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -48,19 +48,35 @@
 
 void SessionStatsBuilder::buildAndReset(int64_t* requestCount,
         int64_t* errorResultCount, bool* deviceError,
-        std::map<int, StreamStats> *statsMap) {
+        std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+        std::map<int, StreamStats>* statsMap) {
     std::lock_guard<std::mutex> l(mLock);
     *requestCount = mRequestCount;
     *errorResultCount = mErrorResultCount;
     *deviceError = mDeviceError;
     *statsMap = mStatsMap;
 
+    int32_t minFps = 0, maxFps = 0;
+    if (mRequestedFpsRangeHistogram.size() > 0) {
+        auto mostCommonIt = mRequestedFpsRangeHistogram.begin();
+        for (auto it = mostCommonIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+            if (it->second.first > mostCommonIt->second.first) {
+                mostCommonIt = it;
+            }
+        }
+        minFps = mostCommonIt->first >> 32;
+        maxFps = mostCommonIt->first & 0xFFFF'FFFFU;
+    }
+    *mostRequestedFpsRange = std::make_pair(minFps, maxFps);
+
     // Reset internal states
     mRequestCount = 0;
     mErrorResultCount = 0;
     mCounterStopped = false;
     mDeviceError = false;
     mUserTag.clear();
+    mRequestedFpsRangeHistogram.clear();
+
     for (auto& streamStats : mStatsMap) {
         StreamStats& streamStat = streamStats.second;
         streamStat.mRequestedFrameCount = 0;
@@ -125,6 +141,31 @@
     mDeviceError = true;
 }
 
+void SessionStatsBuilder::incFpsRequestedCount(int32_t minFps, int32_t maxFps,
+        int64_t frameNumber) {
+    std::lock_guard<std::mutex> l(mLock);
+
+    // Stuff range into a 64-bit value to make hashing simple
+    uint64_t currentFpsTarget = minFps;
+    currentFpsTarget = currentFpsTarget << 32 | maxFps;
+
+    auto &stats = mRequestedFpsRangeHistogram[currentFpsTarget];
+    stats.first++;
+    stats.second = frameNumber;
+
+    // Ensure weird app input of target FPS ranges doesn't cause unbounded memory growth
+    if (mRequestedFpsRangeHistogram.size() > FPS_HISTOGRAM_MAX_SIZE) {
+        // Find the oldest-used fps range to drop, based on its last seen frame number
+        auto deleteIt = mRequestedFpsRangeHistogram.begin();
+        for (auto it = deleteIt; it != mRequestedFpsRangeHistogram.end(); it++) {
+            if (it->second.second < deleteIt->second.second) {
+                deleteIt = it;
+            }
+        }
+        mRequestedFpsRangeHistogram.erase(deleteIt);
+    }
+}
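The histogram key packs both ends of the requested range into one 64-bit value. A standalone sketch of the round trip (hypothetical helpers; the real code inlines this):

    uint64_t packFpsRange(int32_t minFps, int32_t maxFps) {
        return (static_cast<uint64_t>(minFps) << 32) | static_cast<uint32_t>(maxFps);
    }
    std::pair<int32_t, int32_t> unpackFpsRange(uint64_t key) {
        return {static_cast<int32_t>(key >> 32), static_cast<int32_t>(key & 0xFFFF'FFFFu)};
    }
    // packFpsRange(15, 30) == 0x0000000F0000001E; unpackFpsRange() returns {15, 30},
    // matching the >> 32 and & 0xFFFF'FFFF extraction done in buildAndReset().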
+
 void StreamStats::updateLatencyHistogram(int32_t latencyMs) {
     size_t i;
     for (i = 0; i < mCaptureLatencyBins.size(); i++) {
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index 2936531..914c09e 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -22,6 +22,8 @@
 #include <array>
 #include <map>
 #include <mutex>
+#include <unordered_map>
+#include <utility>
 
 namespace android {
 
@@ -64,7 +66,8 @@
     void buildAndReset(/*out*/int64_t* requestCount,
             /*out*/int64_t* errorResultCount,
             /*out*/bool* deviceError,
-            /*out*/std::map<int, StreamStats> *statsMap);
+            /*out*/std::pair<int32_t, int32_t>* mostRequestedFpsRange,
+            /*out*/std::map<int, StreamStats>* statsMap);
 
     // Stream specific counter
     void startCounter(int streamId);
@@ -76,6 +79,13 @@
     void incResultCounter(bool dropped);
     void onDeviceError();
 
+    // Session specific statistics
+
+    // Limit on size of FPS range histogram
+    static const size_t FPS_HISTOGRAM_MAX_SIZE = 10;
+
+    void incFpsRequestedCount(int32_t minFps, int32_t maxFps, int64_t frameNumber);
+
     SessionStatsBuilder() : mRequestCount(0), mErrorResultCount(0),
              mCounterStopped(false), mDeviceError(false) {}
 private:
@@ -85,6 +95,11 @@
     bool mCounterStopped;
     bool mDeviceError;
     std::string mUserTag;
+
+    // Histogram of frame counts of requested target FPS ranges
+    // (min_fps << 32 | max_fps) -> (# of frames with this fps, last seen framenumber)
+    std::unordered_map<uint64_t, std::pair<int64_t, int64_t>> mRequestedFpsRangeHistogram;
+
     // Map from stream id to stream statistics
     std::map<int, StreamStats> mStatsMap;
 };
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
new file mode 100644
index 0000000..c8f5e86
--- /dev/null
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Utils.h"
+#include <android-base/properties.h>
+#include <com_android_internal_camera_flags.h>
+
+
+namespace android {
+
+using namespace com::android::internal::camera::flags;
+
+constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
+constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
+constexpr int MAX_VENDOR_API_LEVEL = 1000000;
+constexpr int FIRST_VNDK_VERSION = 202404;
+
+int getVNDKVersionFromProp(int defaultVersion) {
+    if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
+        return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
+    }
+
+    int vndkVersion = base::GetIntProperty(BOARD_API_LEVEL_PROP, MAX_VENDOR_API_LEVEL);
+
+    if (vndkVersion == MAX_VENDOR_API_LEVEL) {
+        // Couldn't find property
+        return defaultVersion;
+    }
+
+    if (vndkVersion < __ANDROID_API_V__) {
+        // VNDK versions below V return the corresponding SDK version.
+        return vndkVersion;
+    }
+
+    // VNDK versions for Android V and above are of the format YYYYMM, starting with 202404,
+    // and are bumped up once a year. So V would be 202404 and the next one would be 202504.
+    // This is the same assumption as that made in system/core/init/property_service.cpp.
+    vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
+    return __ANDROID_API_V__ + vndkVersion;
+}
+
+} // namespace android
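Worked example of the mapping, assuming __ANDROID_API_V__ == 35 (its value in current NDK headers):

    // ro.board.api_level absent     -> returns defaultVersion
    // ro.board.api_level == 34      -> returns 34 (pre-V boards report the SDK version directly)
    // ro.board.api_level == 202404  -> (202404 - 202404) / 100 == 0 -> returns 35
    // ro.board.api_level == 202504  -> (202504 - 202404) / 100 == 1 -> returns 36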
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
new file mode 100644
index 0000000..f8a107d
--- /dev/null
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_UTILS_H
+#define ANDROID_SERVERS_CAMERA_UTILS_H
+
+namespace android {
+
+/**
+ * As of Android V, ro.board.api_level returns the year and month of release (e.g. 202404)
+ * instead of the release SDK version. This function maps the year/month format back to the
+ * release SDK version.
+ *
+ * Returns defaultVersion if the property is not found.
+ */
+int getVNDKVersionFromProp(int defaultVersion);
+
+} // namespace android
+
+#endif //ANDROID_SERVERS_CAMERA_UTILS_H
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
new file mode 100644
index 0000000..1d0c5a2
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VirtualDeviceCameraIdMapper"
+
+#include <android_companion_virtualdevice_flags.h>
+#include <camera/CameraUtils.h>
+
+#include "VirtualDeviceCameraIdMapper.h"
+
+namespace android {
+
+namespace vd_flags = android::companion::virtualdevice::flags;
+
+void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
+        int32_t deviceId, const std::string& mappedCameraId) {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        return;
+    }
+
+    if (deviceId == kDefaultDeviceId) {
+        ALOGD("%s: Not adding entry for a camera of the default device", __func__);
+        return;
+    }
+
+    ALOGD("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
+          deviceId, mappedCameraId.c_str());
+
+    std::scoped_lock lock(mLock);
+    mDeviceIdMappedCameraIdPairToCameraIdMap[{deviceId, mappedCameraId}] = cameraId;
+}
+
+void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        return;
+    }
+
+    std::scoped_lock lock(mLock);
+    for (auto it = mDeviceIdMappedCameraIdPairToCameraIdMap.begin();
+         it != mDeviceIdMappedCameraIdPairToCameraIdMap.end(); ++it) {
+        if (it->first.second == cameraId) {
+            mDeviceIdMappedCameraIdPairToCameraIdMap.erase(it);
+            return;
+        }
+    }
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+        int32_t deviceId, const std::string& mappedCameraId) const {
+    if (deviceId == kDefaultDeviceId) {
+        ALOGD("%s: Returning the camera id as the mapped camera id for camera %s, as it "
+              "belongs to the default device", __func__, mappedCameraId.c_str());
+        return mappedCameraId;
+    }
+
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGD("%s: Device-aware camera feature is not enabled, returning the camera id as "
+              "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
+        return mappedCameraId;
+    }
+
+    std::scoped_lock lock(mLock);
+    auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
+            {deviceId, mappedCameraId});
+    if (iterator == mDeviceIdMappedCameraIdPairToCameraIdMap.end()) {
+        ALOGW("%s: No entry found for device id %d and mapped camera id %s", __func__,
+              deviceId, mappedCameraId.c_str());
+        return std::nullopt;
+    }
+    return iterator->second;
+}
+
+std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
+        const std::string& cameraId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        return std::make_pair(kDefaultDeviceId, cameraId);
+    }
+
+    std::scoped_lock lock(mLock);
+    for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
+            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+        if (actualCameraId == cameraId) {
+            return deviceIdMappedCameraIdPair;
+        }
+    }
+    ALOGV("%s: No device id and mapped camera id found for camera id %s, so it must "
+            "belong to the default device", __func__, cameraId.c_str());
+    return std::make_pair(kDefaultDeviceId, cameraId);
+}
+
+int VirtualDeviceCameraIdMapper::getNumberOfCameras(int32_t deviceId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        return 0;
+    }
+
+    int numOfCameras = 0;
+    std::scoped_lock lock(mLock);
+    for (const auto& [deviceIdMappedCameraIdPair, _]
+            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+        if (deviceIdMappedCameraIdPair.first == deviceId) {
+            numOfCameras++;
+        }
+    }
+    return numOfCameras;
+}
+
+std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
+        int api1CameraId, int32_t deviceId) const {
+    if (!vd_flags::camera_device_awareness()) {
+        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        return std::nullopt;
+    }
+
+    int matchingCameraIndex = 0;
+    std::scoped_lock lock(mLock);
+    for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
+            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
+        if (deviceIdMappedCameraIdPair.first == deviceId) {
+            if (matchingCameraIndex == api1CameraId) {
+                return actualCameraId;
+            }
+            matchingCameraIndex++;
+        }
+    }
+    ALOGW("%s: No entry found for device id %d and API 1 camera id %d", __func__,
+          deviceId, api1CameraId);
+    return std::nullopt;
+}
+
+} // namespace android
\ No newline at end of file
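A hypothetical usage sketch of the mapper, assuming the camera_device_awareness flag is enabled (the ids below are illustrative):

    VirtualDeviceCameraIdMapper mapper;
    mapper.addCamera(/*cameraId=*/"device@1.1/virtual/7", /*deviceId=*/2, /*mappedCameraId=*/"0");

    // An app on virtual device 2 opens camera "0"; the service resolves the HAL-visible id.
    std::optional<std::string> halId = mapper.getActualCameraId(2, "0");  // "device@1.1/virtual/7"

    // The reverse lookup, e.g. for availability callbacks.
    auto [deviceId, mappedId] =
            mapper.getDeviceIdAndMappedCameraIdPair("device@1.1/virtual/7");  // {2, "0"}

    int count = mapper.getNumberOfCameras(2);  // 1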
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
new file mode 100644
index 0000000..96c0cb4
--- /dev/null
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+#define ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
+
+#include <string>
+#include <map>
+#include <mutex>
+
+#include <utils/Mutex.h>
+
+namespace android {
+
+class VirtualDeviceCameraIdMapper {
+public:
+    VirtualDeviceCameraIdMapper() {}
+
+    virtual ~VirtualDeviceCameraIdMapper() {}
+
+    void addCamera(const std::string& cameraId, int32_t deviceId,
+            const std::string& mappedCameraId) EXCLUDES(mLock);
+
+    void removeCamera(const std::string& cameraId) EXCLUDES(mLock);
+
+    /**
+     * Return the actual camera id for a given device id (i.e., the id of the device owning
+     * the camera, for a virtual camera this would be the id of the virtual device, and for
+     * any other cameras this would be default device id, i.e., 0) and mapped camera
+     * id (for virtual devices, the back and front virtual cameras of that device would have
+     * 0 and 1 respectively as their mapped camera id, and for any other cameras this
+     * would be their actual camera id). When the camera device awareness flag is disabled,
+     * this will return the given camera id itself.
+     */
+    std::optional<std::string> getActualCameraId(int32_t deviceId,
+            const std::string& mappedCameraId) const EXCLUDES(mLock);
+
+    /**
+     * Return the device id (i.e., the id of the device owning the camera, for a virtual
+     * camera this would be the id of the virtual device, and for any other cameras this
+     * would be default device id, i.e., 0) and the mapped camera id (for virtual
+     * devices, the back and front virtual cameras of that device would have 0 and 1
+     * respectively as their mapped camera id, and for any other cameras this would
+     * be their actual camera id) for a given camera id. When the camera device awareness flag is
+     * disabled, this will return a pair of kDefaultDeviceId and the given cameraId.
+     */
+    std::pair<int32_t, std::string> getDeviceIdAndMappedCameraIdPair(
+            const std::string& cameraId) const EXCLUDES(mLock);
+
+    /**
+     * Return the number of virtual cameras for the given device id; this corresponds to the
+     * legacy camera API getNumberOfCameras. When the camera device awareness flag is disabled,
+     * this will return 0.
+     */
+    int getNumberOfCameras(int32_t deviceId) const EXCLUDES(mLock);
+
+    /**
+     * Return the actual camera id corresponding to the virtual camera with the given API 1 camera
+     * id. When the camera device awareness flag is disabled, this will return std::nullopt.
+     */
+    std::optional<std::string> getActualCameraId(int api1CameraId, int32_t deviceId)
+            const EXCLUDES(mLock);
+
+private:
+    mutable std::mutex mLock;
+
+    // Map of (deviceId, app-visible cameraId) -> HAL-visible cameraId
+    std::map<std::pair<int32_t, std::string>, std::string>
+            mDeviceIdMappedCameraIdPairToCameraIdMap GUARDED_BY(mLock);
+};
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_VIRTUAL_DEVICE_CAMERA_ID_MAPPER_H
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index cb4e10f..90530f6 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -11,6 +12,7 @@
         "libbinder",
         "libbinder_ndk",
         "libcamera_metadata",
+        "libexif",
         "liblog",
         "libfmq",
         "libgui",
@@ -46,7 +48,7 @@
     name: "libvirtualcamera_utils",
     srcs: [
         "util/JpegUtil.cc",
-        "util/MetadataBuilder.cc",
+        "util/MetadataUtil.cc",
         "util/Util.cc",
         "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
@@ -54,7 +56,7 @@
         "util/EglProgram.cc",
         "util/EglSurfaceTexture.cc",
         "util/EglUtil.cc",
-        "util/Permissions.cc"
+        "util/Permissions.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/TEST_MAPPING b/services/camera/virtualcamera/TEST_MAPPING
index 66c5e52..e976704 100644
--- a/services/camera/virtualcamera/TEST_MAPPING
+++ b/services/camera/virtualcamera/TEST_MAPPING
@@ -2,16 +2,15 @@
   "presubmit" : [
     {
       "name": "virtual_camera_tests"
-    }
-  ],
-  "postsubmit": [
+    },
     {
       "name": "CtsVirtualDevicesCameraTestCases",
       "options": [
         {
           "exclude-annotation": "androidx.test.filters.FlakyTest"
         }
-      ]
+      ],
+      "keywords": ["primary-device"]
     }
   ]
 }
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index 84f721b..d4c6423 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -23,11 +23,14 @@
 #include <chrono>
 #include <cstdint>
 #include <iterator>
+#include <numeric>
 #include <optional>
 #include <string>
+#include <vector>
 
 #include "VirtualCameraSession.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
@@ -35,7 +38,7 @@
 #include "android/binder_status.h"
 #include "log/log.h"
 #include "system/camera_metadata.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -44,7 +47,10 @@
 
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraResourceCost;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -64,27 +70,80 @@
 // Prefix of camera name - "device@1.1/virtual/{numerical_id}"
 const char* kDevicePathPrefix = "device@1.1/virtual/";
 
-constexpr std::chrono::nanoseconds kMinFrameDuration30Fps = 1s / 30;
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
+constexpr int32_t kMinFps = 15;
+
+constexpr std::chrono::nanoseconds kMaxFrameDuration =
+    std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+
+constexpr uint8_t kPipelineMaxDepth = 2;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
-struct Resolution {
-  Resolution(const int w, const int h) : width(w), height(h) {
-  }
+const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
+    Resolution(176, 144), Resolution(240, 144), Resolution(256, 144),
+    Resolution(240, 160), Resolution(240, 180)};
 
-  bool operator<(const Resolution& other) const {
-    return width * height < other.width * other.height;
-  }
+const std::array<PixelFormat, 3> kOutputFormats{
+    PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
+    PixelFormat::BLOB};
 
-  bool operator==(const Resolution& other) const {
-    return width == other.width && height == other.height;
-  }
-
-  const int width;
-  const int height;
+// The resolutions below will be used to extend the set of supported output resolutions.
+// All resolutions with lower pixel count and same aspect ratio as some supported
+// input resolution will be added to the set of supported output resolutions.
+const std::array<Resolution, 10> kOutputResolutions{
+    Resolution(320, 240),   Resolution(640, 360),  Resolution(640, 480),
+    Resolution(720, 480),   Resolution(720, 576),  Resolution(800, 600),
+    Resolution(1024, 576),  Resolution(1280, 720), Resolution(1280, 960),
+    Resolution(1280, 1080),
 };
 
+std::vector<Resolution> getSupportedJpegThumbnailSizes(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  auto isSupportedByAnyInputConfig =
+      [&configs](const Resolution thumbnailResolution) {
+        return std::any_of(
+            configs.begin(), configs.end(),
+            [thumbnailResolution](const SupportedStreamConfiguration& config) {
+              return isApproximatellySameAspectRatio(
+                  thumbnailResolution, Resolution(config.width, config.height));
+            });
+      };
+
+  std::vector<Resolution> supportedThumbnailSizes({Resolution(0, 0)});
+  std::copy_if(kStandardJpegThumbnailSizes.begin(),
+               kStandardJpegThumbnailSizes.end(),
+               std::back_insert_iterator(supportedThumbnailSizes),
+               isSupportedByAnyInputConfig);
+  return supportedThumbnailSizes;
+}
+
+bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
+  return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
+         kOutputFormats.end();
+}
+
+std::vector<FpsRange> fpsRangesForInputConfig(
+    const std::vector<SupportedStreamConfiguration>& configs) {
+  std::set<FpsRange> availableRanges;
+
+  for (const SupportedStreamConfiguration& config : configs) {
+    availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
+  }
+
+  if (std::any_of(configs.begin(), configs.end(),
+                  [](const SupportedStreamConfiguration& config) {
+                    return config.maxFps >= 30;
+                  })) {
+    availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
+    availableRanges.insert({.minFps = 30, .maxFps = 30});
+  }
+
+  return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
+}
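Worked example (using kMinFps == 15 as defined above):

    // One input config with maxFps == 60:
    //   per-config loop     -> {15,60}, {60,60}
    //   maxFps >= 30 branch -> {15,30}, {30,30}
    //   advertised ranges: {15,30}, {15,60}, {30,30}, {60,60}
    // One input config with maxFps == 30:
    //   both branches insert the same pairs -> {15,30}, {30,30}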
+
 std::optional<Resolution> getMaxResolution(
     const std::vector<SupportedStreamConfiguration>& configs) {
   auto itMax = std::max_element(configs.begin(), configs.end(),
@@ -103,24 +162,65 @@
   return Resolution(itMax->width, itMax->height);
 }
 
-std::set<Resolution> getUniqueResolutions(
+// Returns a map from each unique resolution to the maximum maxFps across all streams
+// with that resolution.
+std::map<Resolution, int> getResolutionToMaxFpsMap(
     const std::vector<SupportedStreamConfiguration>& configs) {
-  std::set<Resolution> uniqueResolutions;
-  std::transform(configs.begin(), configs.end(),
-                 std::inserter(uniqueResolutions, uniqueResolutions.begin()),
-                 [](const SupportedStreamConfiguration& config) {
-                   return Resolution(config.width, config.height);
-                 });
-  return uniqueResolutions;
+  std::map<Resolution, int> resolutionToMaxFpsMap;
+
+  for (const SupportedStreamConfiguration& config : configs) {
+    Resolution resolution(config.width, config.height);
+    if (resolutionToMaxFpsMap.find(resolution) == resolutionToMaxFpsMap.end()) {
+      resolutionToMaxFpsMap[resolution] = config.maxFps;
+    } else {
+      int currentMaxFps = resolutionToMaxFpsMap[resolution];
+      resolutionToMaxFpsMap[resolution] = std::max(currentMaxFps, config.maxFps);
+    }
+  }
+
+  std::map<Resolution, int> additionalResolutionToMaxFpsMap;
+  // Add additional resolutions we can support by downscaling input streams with
+  // the same aspect ratio.
+  for (const Resolution& outputResolution : kOutputResolutions) {
+    for (const auto& [resolution, maxFps] : resolutionToMaxFpsMap) {
+      if (resolutionToMaxFpsMap.find(outputResolution) !=
+          resolutionToMaxFpsMap.end()) {
+        // Resolution is already in the map, skip it.
+        continue;
+      }
+
+      if (outputResolution < resolution &&
+          isApproximatellySameAspectRatio(outputResolution, resolution)) {
+        // Lower resolution with same aspect ratio, we can achieve this by
+        // downscaling, let's add it to the map.
+        ALOGD(
+            "Extending set of output resolutions with %dx%d which has same "
+            "aspect ratio as supported input %dx%d.",
+            outputResolution.width, outputResolution.height, resolution.width,
+            resolution.height);
+        additionalResolutionToMaxFpsMap[outputResolution] = maxFps;
+        break;
+      }
+    }
+  }
+
+  // Add all resolutions we can achieve by downscaling to the map.
+  resolutionToMaxFpsMap.insert(additionalResolutionToMaxFpsMap.begin(),
+                               additionalResolutionToMaxFpsMap.end());
+
+  return resolutionToMaxFpsMap;
 }
 
 // TODO(b/301023410) - Populate camera characteristics according to camera configuration.
 std::optional<CameraMetadata> initCameraCharacteristics(
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig) {
+    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
+    const SensorOrientation sensorOrientation, const LensFacing lensFacing,
+    const int32_t deviceId) {
   if (!std::all_of(supportedInputConfig.begin(), supportedInputConfig.end(),
                    [](const SupportedStreamConfiguration& config) {
                      return isFormatSupportedForInput(
-                         config.width, config.height, config.pixelFormat);
+                         config.width, config.height, config.pixelFormat,
+                         config.maxFps);
                    })) {
     ALOGE("%s: input configuration contains unsupported format", __func__);
     return std::nullopt;
@@ -130,27 +230,124 @@
       MetadataBuilder()
           .setSupportedHardwareLevel(
               ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+          .setDeviceId(deviceId)
           .setFlashAvailable(false)
-          .setLensFacing(ANDROID_LENS_FACING_EXTERNAL)
-          .setSensorOrientation(0)
+          .setLensFacing(
+              static_cast<camera_metadata_enum_android_lens_facing>(lensFacing))
+          .setAvailableFocalLengths({VirtualCameraDevice::kFocalLength})
+          .setSensorOrientation(static_cast<int32_t>(sensorOrientation))
+          .setSensorReadoutTimestamp(
+              ANDROID_SENSOR_READOUT_TIMESTAMP_NOT_SUPPORTED)
+          .setSensorTimestampSource(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN)
+          .setSensorPhysicalSize(36.0, 24.0)
+          .setAvailableAberrationCorrectionModes(
+              {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF})
+          .setAvailableNoiseReductionModes({ANDROID_NOISE_REDUCTION_MODE_OFF})
           .setAvailableFaceDetectModes({ANDROID_STATISTICS_FACE_DETECT_MODE_OFF})
+          .setAvailableStreamUseCases(
+              {ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL,
+               ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL})
+          .setAvailableTestPatternModes({ANDROID_SENSOR_TEST_PATTERN_MODE_OFF})
           .setAvailableMaxDigitalZoom(1.0)
           .setControlAvailableModes({ANDROID_CONTROL_MODE_AUTO})
           .setControlAfAvailableModes({ANDROID_CONTROL_AF_MODE_OFF})
-          .setControlAeAvailableFpsRange(10, 30)
+          .setControlAvailableSceneModes({ANDROID_CONTROL_SCENE_MODE_DISABLED})
+          .setControlAvailableEffects({ANDROID_CONTROL_EFFECT_MODE_OFF})
+          .setControlAvailableVideoStabilizationModes(
+              {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF})
+          .setControlAeAvailableModes({ANDROID_CONTROL_AE_MODE_ON})
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO})
+          .setControlAeAvailableFpsRanges(
+              fpsRangesForInputConfig(supportedInputConfig))
           .setControlMaxRegions(0, 0, 0)
           .setControlAfRegions({kDefaultEmptyControlRegion})
           .setControlAeRegions({kDefaultEmptyControlRegion})
           .setControlAwbRegions({kDefaultEmptyControlRegion})
-          .setControlAeCompensationRange(0, 1)
+          .setControlAeCompensationRange(0, 0)
           .setControlAeCompensationStep(camera_metadata_rational_t{0, 1})
+          .setControlAwbLockAvailable(false)
+          .setControlAeLockAvailable(false)
+          .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
           .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
+          .setCroppingType(ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY)
+          .setJpegAvailableThumbnailSizes(
+              getSupportedJpegThumbnailSizes(supportedInputConfig))
           .setMaxJpegSize(kMaxJpegSize)
-          .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
-          .setAvailableResultKeys({ANDROID_CONTROL_AF_MODE})
+          .setMaxFaceCount(0)
+          .setMaxFrameDuration(kMaxFrameDuration)
+          .setMaxNumberOutputStreams(
+              VirtualCameraDevice::kMaxNumberOfRawStreams,
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              VirtualCameraDevice::kMaxNumberOfStallStreams)
+          .setRequestPartialResultCount(1)
+          .setPipelineMaxDepth(kPipelineMaxDepth)
+          .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
+          .setAvailableRequestKeys({ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+                                    ANDROID_CONTROL_CAPTURE_INTENT,
+                                    ANDROID_CONTROL_AE_MODE,
+                                    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                    ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                                    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                                    ANDROID_CONTROL_AF_TRIGGER,
+                                    ANDROID_CONTROL_AF_MODE,
+                                    ANDROID_CONTROL_AWB_MODE,
+                                    ANDROID_SCALER_CROP_REGION,
+                                    ANDROID_CONTROL_EFFECT_MODE,
+                                    ANDROID_CONTROL_MODE,
+                                    ANDROID_CONTROL_SCENE_MODE,
+                                    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+                                    ANDROID_CONTROL_ZOOM_RATIO,
+                                    ANDROID_FLASH_MODE,
+                                    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                                    ANDROID_JPEG_ORIENTATION,
+                                    ANDROID_JPEG_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    ANDROID_NOISE_REDUCTION_MODE,
+                                    ANDROID_STATISTICS_FACE_DETECT_MODE})
+          .setAvailableResultKeys({
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+              ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+              ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+              ANDROID_CONTROL_AE_LOCK,
+              ANDROID_CONTROL_AE_MODE,
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+              ANDROID_CONTROL_AE_STATE,
+              ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+              ANDROID_CONTROL_AF_MODE,
+              ANDROID_CONTROL_AF_STATE,
+              ANDROID_CONTROL_AF_TRIGGER,
+              ANDROID_CONTROL_AWB_LOCK,
+              ANDROID_CONTROL_AWB_MODE,
+              ANDROID_CONTROL_AWB_STATE,
+              ANDROID_CONTROL_CAPTURE_INTENT,
+              ANDROID_CONTROL_EFFECT_MODE,
+              ANDROID_CONTROL_MODE,
+              ANDROID_CONTROL_SCENE_MODE,
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+              ANDROID_STATISTICS_FACE_DETECT_MODE,
+              ANDROID_FLASH_MODE,
+              ANDROID_FLASH_STATE,
+              ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+              ANDROID_JPEG_QUALITY,
+              ANDROID_JPEG_THUMBNAIL_QUALITY,
+              ANDROID_LENS_FOCAL_LENGTH,
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+              ANDROID_NOISE_REDUCTION_MODE,
+              ANDROID_REQUEST_PIPELINE_DEPTH,
+              ANDROID_SENSOR_TIMESTAMP,
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+              ANDROID_STATISTICS_SCENE_FLICKER,
+          })
           .setAvailableCapabilities(
-              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE})
-          .setAvailableCharacteristicKeys();
+              {ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE});
 
   // Active array size must correspond to largest supported input resolution.
   std::optional<Resolution> maxResolution =
@@ -160,56 +357,36 @@
   }
   builder.setSensorActiveArraySize(0, 0, maxResolution->width,
                                    maxResolution->height);
+  builder.setSensorPixelArraySize(maxResolution->width, maxResolution->height);
 
   std::vector<MetadataBuilder::StreamConfiguration> outputConfigurations;
 
   // TODO(b/301023410) Add also all "standard" resolutions we can rescale the
   // streams to (all standard resolutions with same aspect ratio).
 
-  // Add IMPLEMENTATION_DEFINED format for all supported input resolutions.
-  std::set<Resolution> uniqueResolutions =
-      getUniqueResolutions(supportedInputConfig);
-  std::transform(
-      uniqueResolutions.begin(), uniqueResolutions.end(),
-      std::back_inserter(outputConfigurations),
-      [](const Resolution& resolution) {
-        return MetadataBuilder::StreamConfiguration{
-            .width = resolution.width,
-            .height = resolution.height,
-            .format = ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-            .minFrameDuration = kMinFrameDuration30Fps,
-            .minStallDuration = 0s};
-      });
+  std::map<Resolution, int> resolutionToMaxFpsMap =
+      getResolutionToMaxFpsMap(supportedInputConfig);
 
-  // Add all supported configuration with explicit pixel format.
-  std::transform(supportedInputConfig.begin(), supportedInputConfig.end(),
-                 std::back_inserter(outputConfigurations),
-                 [](const SupportedStreamConfiguration& config) {
-                   return MetadataBuilder::StreamConfiguration{
-                       .width = config.width,
-                       .height = config.height,
-                       .format = static_cast<int>(config.pixelFormat),
-                       .minFrameDuration = kMinFrameDuration30Fps,
-                       .minStallDuration = 0s};
-                 });
-
-  // TODO(b/301023410) We currently don't support rescaling for still capture,
-  // so only announce BLOB support for formats exactly matching the input.
-  std::transform(uniqueResolutions.begin(), uniqueResolutions.end(),
-                 std::back_inserter(outputConfigurations),
-                 [](const Resolution& resolution) {
-                   return MetadataBuilder::StreamConfiguration{
-                       .width = resolution.width,
-                       .height = resolution.height,
-                       .format = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                       .minFrameDuration = kMinFrameDuration30Fps,
-                       .minStallDuration = 0s};
-                 });
+  // Add configurations for all unique input resolutions and output formats.
+  for (const PixelFormat format : kOutputFormats) {
+    std::transform(
+        resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
+        std::back_inserter(outputConfigurations), [format](const auto& entry) {
+          Resolution resolution = entry.first;
+          int maxFps = entry.second;
+          return MetadataBuilder::StreamConfiguration{
+              .width = resolution.width,
+              .height = resolution.height,
+              .format = static_cast<int32_t>(format),
+              .minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
+              .minStallDuration = 0s};
+        });
+  }
 
   ALOGV("Adding %zu output configurations", outputConfigurations.size());
   builder.setAvailableOutputStreamConfigurations(outputConfigurations);
 
-  auto metadata = builder.build();
+  auto metadata = builder.setAvailableCharacteristicKeys().build();
   if (metadata == nullptr) {
     ALOGE("Failed to build metadata!");
     return CameraMetadata();
@@ -221,14 +398,14 @@
 }  // namespace
 
 VirtualCameraDevice::VirtualCameraDevice(
-    const uint32_t cameraId,
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
-    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback)
+    const uint32_t cameraId, const VirtualCameraConfiguration& configuration,
+    int32_t deviceId)
     : mCameraId(cameraId),
-      mVirtualCameraClientCallback(virtualCameraClientCallback),
-      mSupportedInputConfigurations(supportedInputConfig) {
-  std::optional<CameraMetadata> metadata =
-      initCameraCharacteristics(mSupportedInputConfigurations);
+      mVirtualCameraClientCallback(configuration.virtualCameraCallback),
+      mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
+  std::optional<CameraMetadata> metadata = initCameraCharacteristics(
+      mSupportedInputConfigurations, configuration.sensorOrientation,
+      configuration.lensFacing, deviceId);
   if (metadata.has_value()) {
     mCameraCharacteristics = *metadata;
   } else {
@@ -286,6 +463,29 @@
 
 bool VirtualCameraDevice::isStreamCombinationSupported(
     const StreamConfiguration& streamConfiguration) const {
+  if (streamConfiguration.streams.empty()) {
+    ALOGE("%s: Querying empty configuration", __func__);
+    return false;
+  }
+
+  const std::vector<Stream>& streams = streamConfiguration.streams;
+
+  Resolution firstStreamResolution(streams[0].width, streams[0].height);
+  auto isSameAspectRatioAsFirst = [firstStreamResolution](const Stream& stream) {
+    return isApproximatellySameAspectRatio(
+        firstStreamResolution, Resolution(stream.width, stream.height));
+  };
+  if (!std::all_of(streams.begin(), streams.end(), isSameAspectRatioAsFirst)) {
+    ALOGW(
+        "%s: Requested streams do not have the same aspect ratio. Different "
+        "aspect ratios are currently not supported by virtual camera. "
+        "Stream configuration: %s",
+        __func__, streamConfiguration.toString().c_str());
+    return false;
+  }
+
+  int numberOfProcessedStreams = 0;
+  int numberOfStallStreams = 0;
   for (const Stream& stream : streamConfiguration.streams) {
     ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());
 
@@ -294,18 +494,25 @@
       return false;
     }
 
-    // TODO(b/301023410) remove hardcoded format checks, verify against configuration.
     if (stream.rotation != StreamRotation::ROTATION_0 ||
-        (stream.format != PixelFormat::IMPLEMENTATION_DEFINED &&
-         stream.format != PixelFormat::YCBCR_420_888 &&
-         stream.format != PixelFormat::BLOB)) {
+        !isSupportedOutputFormat(stream.format)) {
       ALOGV("Unsupported output stream type");
       return false;
     }
 
+    if (stream.format == PixelFormat::BLOB) {
+      numberOfStallStreams++;
+    } else {
+      numberOfProcessedStreams++;
+    }
+
+    Resolution requestedResolution(stream.width, stream.height);
     auto matchesSupportedInputConfig =
-        [&stream](const SupportedStreamConfiguration& config) {
-          return stream.width == config.width && stream.height == config.height;
+        [requestedResolution](const SupportedStreamConfiguration& config) {
+          Resolution supportedInputResolution(config.width, config.height);
+          return requestedResolution <= supportedInputResolution &&
+                 isApproximatellySameAspectRatio(requestedResolution,
+                                                 supportedInputResolution);
         };
     if (std::none_of(mSupportedInputConfigurations.begin(),
                      mSupportedInputConfigurations.end(),
@@ -314,6 +521,19 @@
       return false;
     }
   }
+
+  if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
+    ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
+          __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
+    return false;
+  }
+
+  if (numberOfStallStreams > kMaxNumberOfStallStreams) {
+    ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
+          numberOfStallStreams, kMaxNumberOfStallStreams);
+    return false;
+  }
+
   return true;
 }
 
@@ -368,6 +588,24 @@
   return std::string(kDevicePathPrefix) + std::to_string(mCameraId);
 }
 
+const std::vector<SupportedStreamConfiguration>&
+VirtualCameraDevice::getInputConfigs() const {
+  return mSupportedInputConfigurations;
+}
+
+Resolution VirtualCameraDevice::getMaxInputResolution() const {
+  std::optional<Resolution> maxResolution =
+      getMaxResolution(mSupportedInputConfigurations);
+  if (!maxResolution.has_value()) {
+    ALOGE(
+        "%s: Cannot determine sensor size for virtual camera - input "
+        "configurations empty?",
+        __func__);
+    return Resolution(0, 0);
+  }
+  return maxResolution.value();
+}
+
 std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
   // SharedRefBase which BnCameraDevice inherits from breaks
   // std::enable_shared_from_this. This is recommended replacement for
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index 402de6c..cba0674 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -22,7 +22,10 @@
 
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/BnCameraDevice.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -35,12 +38,9 @@
  public:
   explicit VirtualCameraDevice(
       uint32_t cameraId,
-      const std::vector<
-          aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
-          supportedInputConfig,
-      std::shared_ptr<
-          ::aidl::android::companion::virtualcamera::IVirtualCameraCallback>
-          virtualCameraClientCallback = nullptr);
+      const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration,
+      int32_t deviceId);
 
   virtual ~VirtualCameraDevice() override = default;
 
@@ -97,6 +97,41 @@
 
   uint32_t getCameraId() const { return mCameraId; }
 
+  const std::vector<
+      aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
+  getInputConfigs() const;
+
+  // Returns largest supported input resolution.
+  Resolution getMaxInputResolution() const;
+
+  // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
+  static constexpr int32_t kMaxNumberOfRawStreams = 0;
+
+  // Maximal number of non-jpeg streams configured concurrently in a single
+  // session. This should be at least 3 and can be increased at the potential
+  // cost of more CPU/GPU load if there are many concurrent streams.
+  static constexpr int32_t kMaxNumberOfProcessedStreams = 3;
+
+  // Maximal number of stalling streams (for virtual camera, only jpeg for now).
+  // Can be increased at the potential cost of more GPU/CPU load.
+  static constexpr int32_t kMaxNumberOfStallStreams = 1;
+
+  // Focal length for full frame sensor.
+  static constexpr float kFocalLength = 43.0;
+
+  // Default JPEG compression quality.
+  static constexpr uint8_t kDefaultJpegQuality = 80;
+
+  // Default JPEG orientation.
+  static constexpr uint8_t kDefaultJpegOrientation = 0;
+
+  // Default Make and Model for Exif
+  static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
+
+  static constexpr camera_metadata_enum_android_control_capture_intent_t
+      kDefaultCaptureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
 
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index 25a43d6..67eaec0 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -33,8 +33,7 @@
 namespace companion {
 namespace virtualcamera {
 
-using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
-using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::common::VendorTagSection;
@@ -43,10 +42,6 @@
 using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
 using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
 
-// TODO(b/301023410) Make camera id range configurable / dynamic
-// based on already registered devices.
-std::atomic_int VirtualCameraProvider::sNextId{42};
-
 ndk::ScopedAStatus VirtualCameraProvider::setCallback(
     const std::shared_ptr<ICameraProviderCallback>& in_callback) {
   ALOGV("%s", __func__);
@@ -155,10 +150,16 @@
 }
 
 std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
-    const std::vector<SupportedStreamConfiguration>& supportedInputConfig,
-    std::shared_ptr<IVirtualCameraCallback> virtualCameraClientCallback) {
+    const VirtualCameraConfiguration& configuration, const int cameraId,
+    const int32_t deviceId) {
+  if (cameraId < 0) {
+    ALOGE("%s: Cannot create camera with negative id. cameraId: %d", __func__,
+          cameraId);
+    return nullptr;
+  }
+
   auto camera = ndk::SharedRefBase::make<VirtualCameraDevice>(
-      sNextId++, supportedInputConfig, virtualCameraClientCallback);
+      cameraId, configuration, deviceId);
   std::shared_ptr<ICameraProviderCallback> callback;
   {
     const std::lock_guard<std::mutex> lock(mLock);
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index d41a005..c536547 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -74,14 +74,10 @@
 
   // Create new virtual camera devices
   // Returns nullptr if creation was not successful.
-  //
-  // TODO(b/301023410) - Add camera configuration.
   std::shared_ptr<VirtualCameraDevice> createCamera(
-      const std::vector<
-          aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
-          supportedInputConfig,
-      std::shared_ptr<aidl::android::companion::virtualcamera::IVirtualCameraCallback>
-          virtualCameraClientCallback = nullptr);
+      const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration,
+      int cameraId, int32_t deviceId);
 
   std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
 
@@ -96,13 +92,10 @@
 
   std::map<std::string, std::shared_ptr<VirtualCameraDevice>> mCameras
       GUARDED_BY(mLock);
-
-  // Numerical id to assign to next created camera.
-  static std::atomic_int sNextId;
 };
 
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
 
-#endif  // ANDROID_SERVICES_VIRTUAL_CAMERA_VIRTUALCAMERAPROVIDER_H
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERAPROVIDER_H
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 79c91ef..ca62cce 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,21 +14,27 @@
  * limitations under the License.
  */
 
+#include "hardware/gralloc.h"
 #define LOG_TAG "VirtualCameraRenderThread"
 #include "VirtualCameraRenderThread.h"
 
 #include <chrono>
-#include <cstddef>
 #include <cstdint>
+#include <cstring>
 #include <future>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <vector>
 
+#include "Exif.h"
 #include "GLES/gl.h"
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraBlob.h"
+#include "aidl/android/hardware/camera/device/CameraBlobId.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureResult.h"
 #include "aidl/android/hardware/camera/device/ErrorCode.h"
@@ -39,10 +45,12 @@
 #include "android-base/thread_annotations.h"
 #include "android/binder_auto_utils.h"
 #include "android/hardware_buffer.h"
+#include "system/camera_metadata.h"
 #include "ui/GraphicBuffer.h"
+#include "ui/Rect.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
@@ -53,6 +61,8 @@
 
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferStatus;
+using ::aidl::android::hardware::camera::device::CameraBlob;
+using ::aidl::android::hardware::camera::device::CameraBlobId;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::CaptureResult;
 using ::aidl::android::hardware::camera::device::ErrorCode;
@@ -65,16 +75,94 @@
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::android::base::ScopedLockAssertion;
 
+using ::android::hardware::camera::common::helper::ExifUtils;
+
 namespace {
 
 using namespace std::chrono_literals;
 
 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
 
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
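+// The thumbnail is embedded into the EXIF APP1 segment, whose 16-bit length
+// field caps the whole segment at roughly 64 KiB, so 32 KiB leaves headroom
+// for the remaining EXIF tags.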
+static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
+
 CameraMetadata createCaptureResultMetadata(
-    const std::chrono::nanoseconds timestamp) {
-  std::unique_ptr<CameraMetadata> metadata =
-      MetadataBuilder().setSensorTimestamp(timestamp).build();
+    const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
+    const Resolution reportedSensorSize) {
+  // All of the keys used in the response need to be referenced in
+  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+  // in VirtualCameraDevice.cc).
+  MetadataBuilder builder =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+          .setControlAeExposureCompensation(0)
+          .setControlAeLockAvailable(false)
+          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAePrecaptureTrigger(
+              // Limited devices are expected to have precapture AE enabled
+              // and respond to the cancellation request. Since we don't
+              // actually support AE at all, let's just respect the
+              // cancellation expectation in case it's requested.
+              requestSettings.aePrecaptureTrigger ==
+                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+          .setControlCaptureIntent(requestSettings.captureIntent)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+          .setControlVideoStabilizationMode(
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+          .setCropRegion(0, 0, reportedSensorSize.width,
+                         reportedSensorSize.height)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegOrientation(requestSettings.jpegOrientation)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setLensOpticalStabilizationMode(
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .setPipelineDepth(kPipelineDepth)
+          .setSensorTimestamp(timestamp)
+          .setStatisticsHotPixelMapMode(
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+          .setStatisticsLensShadingMapMode(
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+  if (requestSettings.fpsRange.has_value()) {
+    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+  }
+
+  if (requestSettings.gpsCoordinates.has_value()) {
+    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+    builder.setJpegGpsCoordinates(coordinates);
+  }
+
+  std::unique_ptr<CameraMetadata> metadata = builder.build();
+
   if (metadata == nullptr) {
     ALOGE("%s: Failed to build capture result metadata", __func__);
     return CameraMetadata();
@@ -150,6 +238,46 @@
   }
 }
 
+std::vector<uint8_t> createExif(
+    Resolution imageSize, const CameraMetadata resultMetadata,
+    const std::vector<uint8_t>& compressedThumbnail = {}) {
+  std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+  exifUtils->initialize();
+
+  // Make a copy of the metadata in order to convert it to the HAL metadata
+  // format (as opposed to the AIDL class) and use the setFromMetadata method
+  // from ExifUtils.
+  camera_metadata_t* rawSettings =
+      clone_camera_metadata((camera_metadata_t*)resultMetadata.metadata.data());
+  if (rawSettings != nullptr) {
+    android::hardware::camera::common::helper::CameraMetadata halMetadata(
+        rawSettings);
+    exifUtils->setFromMetadata(halMetadata, imageSize.width, imageSize.height);
+  }
+  exifUtils->setMake(VirtualCameraDevice::kDefaultMakeAndModel);
+  exifUtils->setModel(VirtualCameraDevice::kDefaultMakeAndModel);
+  exifUtils->setFlash(0);
+
+  std::vector<uint8_t> app1Data;
+
+  size_t thumbnailDataSize = compressedThumbnail.size();
+  const void* thumbnailData =
+      thumbnailDataSize > 0
+          ? reinterpret_cast<const void*>(compressedThumbnail.data())
+          : nullptr;
+
+  if (!exifUtils->generateApp1(thumbnailData, thumbnailDataSize)) {
+    ALOGE("%s: Failed to generate APP1 segment for EXIF metadata", __func__);
+    return app1Data;
+  }
+
+  const uint8_t* data = exifUtils->getApp1Buffer();
+  const size_t size = exifUtils->getApp1Length();
+
+  app1Data.insert(app1Data.end(), data, data + size);
+  return app1Data;
+}
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -170,12 +298,12 @@
 }
 
 VirtualCameraRenderThread::VirtualCameraRenderThread(
-    VirtualCameraSessionContext& sessionContext, const int mWidth,
-    const int mHeight,
+    VirtualCameraSessionContext& sessionContext,
+    const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
     std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
     : mCameraDeviceCallback(cameraDeviceCallback),
-      mInputSurfaceWidth(mWidth),
-      mInputSurfaceHeight(mHeight),
+      mInputSurfaceSize(inputSurfaceSize),
+      mReportedSensorSize(reportedSensorSize),
       mTestMode(testMode),
       mSessionContext(sessionContext) {
 }
@@ -188,8 +316,11 @@
 }
 
 ProcessCaptureRequestTask::ProcessCaptureRequestTask(
-    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers)
-    : mFrameNumber(frameNumber), mBuffers(requestBuffers) {
+    int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+    const RequestSettings& requestSettings)
+    : mFrameNumber(frameNumber),
+      mBuffers(requestBuffers),
+      mRequestSettings(requestSettings) {
 }
 
 int ProcessCaptureRequestTask::getFrameNumber() const {
@@ -201,6 +332,10 @@
   return mBuffers;
 }
 
+const RequestSettings& ProcessCaptureRequestTask::getRequestSettings() const {
+  return mRequestSettings;
+}
+
 void VirtualCameraRenderThread::enqueueTask(
     std::unique_ptr<ProcessCaptureRequestTask> task) {
   std::lock_guard<std::mutex> lock(mLock);
@@ -263,14 +398,25 @@
       std::make_unique<EglTextureProgram>(EglTextureProgram::TextureFormat::YUV);
   mEglTextureRgbProgram = std::make_unique<EglTextureProgram>(
       EglTextureProgram::TextureFormat::RGBA);
-  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(mInputSurfaceWidth,
-                                                           mInputSurfaceHeight);
+  mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
+      mInputSurfaceSize.width, mInputSurfaceSize.height);
+
+  sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
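+  // In test mode there is no external producer for the input surface, so
+  // connect it as a CPU producer to allow the test pattern to be written
+  // into it directly.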
+  if (mTestMode) {
+    inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
+  }
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
   while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
     processCaptureRequest(*task);
   }
 
+  // Destroy the EGL utilities while still on the render thread.
+  mEglSurfaceTexture.reset();
+  mEglTextureRgbProgram.reset();
+  mEglTextureYuvProgram.reset();
+  mEglDisplayContext.reset();
+
   ALOGV("Render thread exiting");
 }
 
@@ -287,7 +433,8 @@
   captureResult.partialResult = 1;
   captureResult.inputBuffer.streamId = -1;
   captureResult.physicalCameraMetadata.resize(0);
-  captureResult.result = createCaptureResultMetadata(timestamp);
+  captureResult.result = createCaptureResultMetadata(
+      timestamp, request.getRequestSettings(), mReportedSensorSize);
 
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
@@ -316,9 +463,10 @@
     }
 
     auto status = streamConfig->format == PixelFormat::BLOB
-                      ? renderIntoBlobStreamBuffer(reqBuffer.getStreamId(),
-                                                   reqBuffer.getBufferId(),
-                                                   reqBuffer.getFence())
+                      ? renderIntoBlobStreamBuffer(
+                            reqBuffer.getStreamId(), reqBuffer.getBufferId(),
+                            captureResult.result, request.getRequestSettings(),
+                            reqBuffer.getFence())
                       : renderIntoImageStreamBuffer(reqBuffer.getStreamId(),
                                                     reqBuffer.getBufferId(),
                                                     reqBuffer.getFence());
@@ -354,7 +502,7 @@
     return;
   }
 
-  ALOGD("%s: Successfully called processCaptureResult", __func__);
+  ALOGV("%s: Successfully called processCaptureResult", __func__);
 }
 
 void VirtualCameraRenderThread::flushCaptureRequest(
@@ -398,9 +546,56 @@
   }
 }
 
+std::vector<uint8_t> VirtualCameraRenderThread::createThumbnail(
+    const Resolution resolution, const int quality) {
+  if (resolution.width == 0 || resolution.height == 0) {
+    ALOGV("%s: Skipping thumbnail creation, zero size requested", __func__);
+    return {};
+  }
+
+  ALOGV("%s: Creating thumbnail with size %d x %d, quality %d", __func__,
+        resolution.width, resolution.height, quality);
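+  // Round the scratch framebuffer up to a multiple of the JPEG DCT block
+  // size; only the thumbnail-sized viewport is rendered and compressed below.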
+  Resolution bufferSize = roundTo2DctSize(resolution);
+  std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
+      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
+  if (framebuffer == nullptr) {
+    ALOGE(
+        "Failed to allocate temporary framebuffer for JPEG thumbnail "
+        "compression");
+    return {};
+  }
+
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // doesn't correspond to the input texture aspect ratio.
+  if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
+                                Rect(resolution.width, resolution.height))
+           .isOk()) {
+    ALOGE(
+        "Failed to render input texture into temporary framebuffer for JPEG "
+        "thumbnail");
+    return {};
+  }
+
+  std::vector<uint8_t> compressedThumbnail;
+  compressedThumbnail.resize(kJpegThumbnailBufferSize);
+  ALOGE("%s: Compressing thumbnail %d x %d", __func__, resolution.width,
+        resolution.height);
+  std::optional<size_t> compressedSize =
+      compressJpeg(resolution.width, resolution.height, quality,
+                   framebuffer->getHardwareBuffer(), {},
+                   compressedThumbnail.size(), compressedThumbnail.data());
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress jpeg thumbnail", __func__);
+    return {};
+  }
+  compressedThumbnail.resize(compressedSize.value());
+  return compressedThumbnail;
+}
+
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoBlobStreamBuffer(
-    const int streamId, const int bufferId, sp<Fence> fence) {
-  ALOGV("%s", __func__);
+    const int streamId, const int bufferId, const CameraMetadata& resultMetadata,
+    const RequestSettings& requestSettings, sp<Fence> fence) {
   std::shared_ptr<AHardwareBuffer> hwBuffer =
       mSessionContext.fetchHardwareBuffer(streamId, bufferId);
   if (hwBuffer == nullptr) {
@@ -415,74 +610,64 @@
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  ALOGV("%s: Rendering JPEG with size %d x %d, quality %d", __func__,
+        stream->width, stream->height, requestSettings.jpegQuality);
+
   // Let's create YUV framebuffer and render the surface into this.
   // This will take care about rescaling as well as potential format conversion.
+  // The buffer dimensions need to be rounded to the nearest multiple of the
+  // JPEG DCT size; however, we pass a viewport corresponding to the stream
+  // size, so the image is only rendered into the area matching the stream
+  // size.
+  Resolution bufferSize =
+      roundTo2DctSize(Resolution(stream->width, stream->height));
   std::shared_ptr<EglFrameBuffer> framebuffer = allocateTemporaryFramebuffer(
-      mEglDisplayContext->getEglDisplay(), stream->width, stream->height);
+      mEglDisplayContext->getEglDisplay(), bufferSize.width, bufferSize.height);
   if (framebuffer == nullptr) {
     ALOGE("Failed to allocate temporary framebuffer for JPEG compression");
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
   // Render into temporary framebuffer.
-  ndk::ScopedAStatus status = renderIntoEglFramebuffer(*framebuffer);
+  ndk::ScopedAStatus status = renderIntoEglFramebuffer(
+      *framebuffer, /*fence=*/nullptr, Rect(stream->width, stream->height));
   if (!status.isOk()) {
     ALOGE("Failed to render input texture into temporary framebuffer");
     return status;
   }
 
-  AHardwareBuffer_Planes planes_info;
-
-  int32_t rawFence = fence != nullptr ? fence->get() : -1;
-  int result = AHardwareBuffer_lockPlanes(hwBuffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          rawFence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes for BLOB buffer: %d", __func__, result);
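+  // RAII guard over the output buffer's CPU plane lock: waits on the acquire
+  // fence, keeps the planes locked for the rest of this scope and unlocks
+  // them automatically on destruction.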
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+                             fence);
+  if (planesLock.getStatus() != OK) {
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
-  std::shared_ptr<AHardwareBuffer> inHwBuffer = framebuffer->getHardwareBuffer();
-  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inHwBuffer.get());
+  std::vector<uint8_t> app1ExifData =
+      createExif(Resolution(stream->width, stream->height), resultMetadata,
+                 createThumbnail(requestSettings.thumbnailResolution,
+                                 requestSettings.thumbnailJpegQuality));
+  std::optional<size_t> compressedSize = compressJpeg(
+      stream->width, stream->height, requestSettings.jpegQuality,
+      framebuffer->getHardwareBuffer(), app1ExifData,
+      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
 
-  bool compressionSuccess = true;
-  if (gBuffer != nullptr) {
-    android_ycbcr ycbcr;
-    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
-      // This should never happen since we're allocating the temporary buffer
-      // with YUV420 layout above.
-      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
-            gBuffer->getPixelFormat());
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
-
-    status_t status =
-        gBuffer->lockYCbCr(AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, &ycbcr);
-    ALOGV("Locked buffers");
-    if (status != NO_ERROR) {
-      AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-      ALOGE("%s: Failed to lock graphic buffer: %d", __func__, status);
-      return cameraStatus(Status::INTERNAL_ERROR);
-    }
-
-    compressionSuccess =
-        compressJpeg(gBuffer->getWidth(), gBuffer->getHeight(), ycbcr,
-                     stream->bufferSize, planes_info.planes[0].data);
-
-    status_t res = gBuffer->unlock();
-    if (res != NO_ERROR) {
-      ALOGE("Failed to unlock graphic buffer: %d", res);
-    }
-  } else {
-    compressionSuccess =
-        compressBlackJpeg(stream->width, stream->height, stream->bufferSize,
-                          planes_info.planes[0].data);
+  if (!compressedSize.has_value()) {
+    ALOGE("%s: Failed to compress JPEG image", __func__);
+    return cameraStatus(Status::INTERNAL_ERROR);
   }
-  AHardwareBuffer_unlock(hwBuffer.get(), nullptr);
-  ALOGV("Unlocked buffers");
-  return compressionSuccess ? ndk::ScopedAStatus::ok()
-                            : cameraStatus(Status::INTERNAL_ERROR);
+
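+  // The BLOB stream convention requires a CameraBlob transport header at the
+  // very end of the allocated buffer so that the consumer can find the actual
+  // JPEG size within the oversized BLOB allocation.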
+  CameraBlob cameraBlob{
+      .blobId = CameraBlobId::JPEG,
+      .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
+
+  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+             (stream->bufferSize - sizeof(cameraBlob)),
+         &cameraBlob, sizeof(cameraBlob));
+
+  ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
+        __func__, compressedSize.value());
+
+  return ndk::ScopedAStatus::ok();
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoImageStreamBuffer(
@@ -518,7 +703,7 @@
 }
 
 ndk::ScopedAStatus VirtualCameraRenderThread::renderIntoEglFramebuffer(
-    EglFrameBuffer& framebuffer, sp<Fence> fence) {
+    EglFrameBuffer& framebuffer, sp<Fence> fence, std::optional<Rect> viewport) {
   ALOGV("%s", __func__);
   // Wait for fence to clear.
   if (fence != nullptr && fence->isValid()) {
@@ -532,6 +717,11 @@
   mEglDisplayContext->makeCurrent();
   framebuffer.beforeDraw();
 
+  Rect viewportRect =
+      viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
+  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
+             viewportRect.getWidth(), viewportRect.getHeight());
+
   sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
   if (textureBuffer == nullptr) {
     // If there's no current buffer, nothing was written to the surface and
@@ -542,8 +732,12 @@
   } else {
     const bool renderSuccess =
         isYuvFormat(static_cast<PixelFormat>(textureBuffer->getPixelFormat()))
-            ? mEglTextureYuvProgram->draw(mEglSurfaceTexture->updateTexture())
-            : mEglTextureRgbProgram->draw(mEglSurfaceTexture->updateTexture());
+            ? mEglTextureYuvProgram->draw(
+                  mEglSurfaceTexture->getTextureId(),
+                  mEglSurfaceTexture->getTransformMatrix())
+            : mEglTextureRgbProgram->draw(
+                  mEglSurfaceTexture->getTextureId(),
+                  mEglSurfaceTexture->getTransformMatrix());
     if (!renderSuccess) {
       ALOGE("%s: Failed to render texture", __func__);
       return cameraStatus(Status::INTERNAL_ERROR);
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index b3aaed8..e222d5b 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,18 +17,24 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <cstdint>
 #include <deque>
 #include <future>
 #include <memory>
 #include <thread>
+#include <vector>
 
+#include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "android/binder_auto_utils.h"
 #include "util/EglDisplayContext.h"
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -49,11 +55,25 @@
   const sp<Fence> mFence;
 };
 
+struct RequestSettings {
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+  Resolution thumbnailResolution = Resolution(0, 0);
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+  std::optional<FpsRange> fpsRange;
+  camera_metadata_enum_android_control_capture_intent_t captureIntent =
+      VirtualCameraDevice::kDefaultCaptureIntent;
+  std::optional<GpsCoordinates> gpsCoordinates;
+  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+      aePrecaptureTrigger;
+};
+
 // Represents single capture request to fill set of buffers.
 class ProcessCaptureRequestTask {
  public:
   ProcessCaptureRequestTask(
-      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers);
+      int frameNumber, const std::vector<CaptureRequestBuffer>& requestBuffers,
+      const RequestSettings& requestSettings = {});
 
   // Returns frame number corresponding to the request.
   int getFrameNumber() const;
@@ -65,9 +85,12 @@
   // so it cannot be access outside of its lifetime.
   const std::vector<CaptureRequestBuffer>& getBuffers() const;
 
+  const RequestSettings& getRequestSettings() const;
+
  private:
   const int mFrameNumber;
   const std::vector<CaptureRequestBuffer> mBuffers;
+  const RequestSettings mRequestSettings;
 };
 
 // Wraps dedicated rendering thread and rendering business with corresponding
@@ -77,14 +100,14 @@
   // Create VirtualCameraRenderThread instance:
   // * sessionContext - VirtualCameraSessionContext reference for shared access
   // to mapped buffers.
-  // * inputWidth - requested width of input surface ("virtual camera sensor")
-  // * inputHeight - requested height of input surface ("virtual camera sensor")
+  // * inputSurfaceSize - requested size of the input surface.
+  // * reportedSensorSize - reported static sensor size of the virtual camera.
   // * cameraDeviceCallback - callback for corresponding camera instance
   // * testMode - when set to true, test pattern is rendered to input surface
   // before each capture request is processed to simulate client input.
   VirtualCameraRenderThread(
-      VirtualCameraSessionContext& sessionContext, int inputWidth,
-      int inputHeight,
+      VirtualCameraSessionContext& sessionContext, Resolution inputSurfaceSize,
+      Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
           cameraDeviceCallback,
@@ -122,13 +145,21 @@
   // TODO(b/301023410) - Refactor the actual rendering logic off this class for
   // easier testability.
 
+  // Create a thumbnail with the specified size for the current image.
+  // The compressed image size is limited to 32 KiB.
+  // Returns a vector with the compressed thumbnail if successful,
+  // an empty vector otherwise.
+  std::vector<uint8_t> createThumbnail(Resolution resolution, int quality);
+
   // Render current image to the BLOB buffer.
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on render thread.
-  ndk::ScopedAStatus renderIntoBlobStreamBuffer(const int streamId,
-                                                const int bufferId,
-                                                sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoBlobStreamBuffer(
+      const int streamId, const int bufferId,
+      const ::aidl::android::hardware::camera::device::CameraMetadata&
+          resultMetadata,
+      const RequestSettings& requestSettings, sp<Fence> fence = nullptr);
 
   // Render current image to the YCbCr buffer.
   // If fence is specified, this function will block until the fence is cleared
@@ -141,16 +172,17 @@
   // If fence is specified, this function will block until the fence is cleared
   // before writing to the buffer.
   // Always called on the render thread.
-  ndk::ScopedAStatus renderIntoEglFramebuffer(EglFrameBuffer& framebuffer,
-                                              sp<Fence> fence = nullptr);
+  ndk::ScopedAStatus renderIntoEglFramebuffer(
+      EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
+      std::optional<Rect> viewport = std::nullopt);
 
   // Camera callback
   const std::shared_ptr<
       ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
       mCameraDeviceCallback;
 
-  const int mInputSurfaceWidth;
-  const int mInputSurfaceHeight;
+  const Resolution mInputSurfaceSize;
+  const Resolution mReportedSensorSize;
   const int mTestMode;
 
   VirtualCameraSessionContext& mSessionContext;
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 7907cdb..48b4410 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,19 +18,28 @@
 #define LOG_TAG "VirtualCameraService"
 #include "VirtualCameraService.h"
 
+#include <algorithm>
+#include <array>
 #include <cinttypes>
 #include <cstdint>
-#include <cstdio>
 #include <memory>
 #include <mutex>
+#include <optional>
+#include <regex>
+#include <variant>
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraProvider.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
+#include "aidl/android/companion/virtualcamera/LensFacing.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "android/binder_auto_utils.h"
 #include "android/binder_libbinder.h"
+#include "android/binder_status.h"
 #include "binder/Status.h"
+#include "fmt/format.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglUtil.h"
 #include "util/Permissions.h"
 #include "util/Util.h"
 
@@ -41,23 +50,43 @@
 namespace virtualcamera {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 
+// TODO(b/301023410) Make camera id range configurable / dynamic
+// based on already registered devices.
+std::atomic_int VirtualCameraService::sNextId{1000};
+
 namespace {
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 60;
+constexpr int kDefaultDeviceId = 0;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
+constexpr char kHelp[] = "help";
 constexpr char kShellCmdHelp[] = R"(
+Usage:
+   cmd virtual_camera command [--option=value]
 Available commands:
  * enable_test_camera
+     Options:
+       --camera_id=(ID) - overrides the numerical ID for the test camera instance
+       --lens_facing=(front|back|external) - specifies the lens facing for the test camera instance
  * disable_test_camera
 )";
 constexpr char kCreateVirtualDevicePermission[] =
     "android.permission.CREATE_VIRTUAL_DEVICE";
 
+constexpr std::array<const char*, 3> kRequiredEglExtensions = {
+    "GL_OES_EGL_image_external",
+    "GL_OES_EGL_image_external_essl3",
+    "GL_EXT_YUV_target",
+};
+
 ndk::ScopedAStatus validateConfiguration(
     const VirtualCameraConfiguration& configuration) {
   if (configuration.supportedStreamConfigs.empty()) {
@@ -69,13 +98,106 @@
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
-                                   config.pixelFormat)) {
+                                   config.pixelFormat, config.maxFps)) {
       ALOGE("%s: Requested unsupported input format: %d x %d (%d)", __func__,
             config.width, config.height, static_cast<int>(config.pixelFormat));
       return ndk::ScopedAStatus::fromServiceSpecificError(
           Status::EX_ILLEGAL_ARGUMENT);
     }
   }
+
+  if (configuration.sensorOrientation != SensorOrientation::ORIENTATION_0 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_90 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_180 &&
+      configuration.sensorOrientation != SensorOrientation::ORIENTATION_270) {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
+  if (configuration.lensFacing != LensFacing::FRONT &&
+      configuration.lensFacing != LensFacing::BACK &&
+      configuration.lensFacing != LensFacing::EXTERNAL) {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
+  return ndk::ScopedAStatus::ok();
+}
+
+enum class Command {
+  ENABLE_TEST_CAMERA,
+  DISABLE_TEST_CAMERA,
+  HELP,
+};
+
+struct CommandWithOptions {
+  Command command;
+  std::map<std::string, std::string> optionToValueMap;
+};
+
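+// Parses a positive decimal integer; returns std::nullopt for non-numeric
+// input or zero.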
+std::optional<int> parseInt(const std::string& s) {
+  if (!std::all_of(s.begin(), s.end(), [](char c) { return std::isdigit(c); })) {
+    return std::nullopt;
+  }
+  int ret = atoi(s.c_str());
+  return ret > 0 ? std::optional(ret) : std::nullopt;
+}
+
+std::optional<LensFacing> parseLensFacing(const std::string& s) {
+  static const std::map<std::string, LensFacing> strToLensFacing{
+      {"front", LensFacing::FRONT},
+      {"back", LensFacing::BACK},
+      {"external", LensFacing::EXTERNAL}};
+  auto it = strToLensFacing.find(s);
+  return it == strToLensFacing.end() ? std::nullopt : std::optional(it->second);
+}
+
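+// Parses shell arguments of the form: <command> [--option[=value]]...
+// Returns either the parsed command or a human-readable error message.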
+std::variant<CommandWithOptions, std::string> parseCommand(
+    const char** args, const uint32_t numArgs) {
+  static const std::regex optionRegex("^--(\\w+)(?:=(.+))?$");
+  static const std::map<std::string, Command> strToCommand{
+      {kHelp, Command::HELP},
+      {kEnableTestCameraCmd, Command::ENABLE_TEST_CAMERA},
+      {kDisableTestCameraCmd, Command::DISABLE_TEST_CAMERA}};
+
+  if (numArgs < 1) {
+    return CommandWithOptions{.command = Command::HELP};
+  }
+
+  // We interpret the first argument as the command.
+  auto it = strToCommand.find(args[0]);
+  if (it == strToCommand.end()) {
+    return "Unknown command: " + std::string(args[0]);
+  }
+
+  CommandWithOptions cmd{.command = it->second};
+
+  for (int i = 1; i < numArgs; i++) {
+    std::cmatch cm;
+    if (!std::regex_match(args[i], cm, optionRegex)) {
+      return "Not an option: " + std::string(args[i]);
+    }
+
+    cmd.optionToValueMap[cm[1]] = cm[2];
+  }
+
+  return cmd;
+};
+
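+// Creates a temporary EGL context and checks that all GL extensions required
+// by the virtual camera EGL renderer are present.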
+ndk::ScopedAStatus verifyRequiredEglExtensions() {
+  EglDisplayContext context;
+  for (const char* eglExtension : kRequiredEglExtensions) {
+    if (!isGlExtensionSupported(eglExtension)) {
+      ALOGE("%s not supported", eglExtension);
+      return ndk::ScopedAStatus::fromExceptionCodeWithMessage(
+          EX_UNSUPPORTED_OPERATION,
+          fmt::format(
+              "Cannot create virtual camera, because required EGL extension {} "
+              "is not supported on this system",
+              eglExtension)
+              .c_str());
+    }
+  }
   return ndk::ScopedAStatus::ok();
 }
 
@@ -90,7 +212,15 @@
 
 ndk::ScopedAStatus VirtualCameraService::registerCamera(
     const ::ndk::SpAIBinder& token,
-    const VirtualCameraConfiguration& configuration, bool* _aidl_return) {
+    const VirtualCameraConfiguration& configuration, const int32_t deviceId,
+    bool* _aidl_return) {
+  return registerCamera(token, configuration, sNextId++, deviceId, _aidl_return);
+}
+
+ndk::ScopedAStatus VirtualCameraService::registerCamera(
+    const ::ndk::SpAIBinder& token,
+    const VirtualCameraConfiguration& configuration, const int cameraId,
+    const int32_t deviceId, bool* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -102,7 +232,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
-  *_aidl_return = true;
+  if (mVerifyEglExtensions) {
+    auto status = verifyRequiredEglExtensions();
+    if (!status.isOk()) {
+      *_aidl_return = false;
+      return status;
+    }
+  }
 
   auto status = validateConfiguration(configuration);
   if (!status.isOk()) {
@@ -121,10 +257,8 @@
     return ndk::ScopedAStatus::ok();
   }
 
-  // TODO(b/301023410) Validate configuration and pass it to the camera.
   std::shared_ptr<VirtualCameraDevice> camera =
-      mVirtualCameraProvider->createCamera(configuration.supportedStreamConfigs,
-                                           configuration.virtualCameraCallback);
+      mVirtualCameraProvider->createCamera(configuration, cameraId, deviceId);
   if (camera == nullptr) {
     ALOGE("Failed to create camera for binder token 0x%" PRIxPTR,
           reinterpret_cast<uintptr_t>(token.get()));
@@ -134,6 +268,7 @@
   }
 
   mTokenToCameraName[token] = camera->getCameraName();
+  *_aidl_return = true;
   return ndk::ScopedAStatus::ok();
 }
 
@@ -163,7 +298,7 @@
 }
 
 ndk::ScopedAStatus VirtualCameraService::getCameraId(
-        const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
+    const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -172,7 +307,7 @@
 
   if (_aidl_return == nullptr) {
     return ndk::ScopedAStatus::fromServiceSpecificError(
-            Status::EX_ILLEGAL_ARGUMENT);
+        Status::EX_ILLEGAL_ARGUMENT);
   }
 
   auto camera = getCamera(token);
@@ -204,8 +339,7 @@
   return mVirtualCameraProvider->getCamera(it->second);
 }
 
-binder_status_t VirtualCameraService::handleShellCommand(int in, int out,
-                                                         int err,
+binder_status_t VirtualCameraService::handleShellCommand(int, int out, int err,
                                                          const char** args,
                                                          uint32_t numArgs) {
   if (numArgs <= 0) {
@@ -214,27 +348,68 @@
     return STATUS_OK;
   }
 
-  if (args == nullptr || args[0] == nullptr) {
+  auto isNullptr = [](const char* ptr) { return ptr == nullptr; };
+  if (args == nullptr || std::any_of(args, args + numArgs, isNullptr)) {
     return STATUS_BAD_VALUE;
   }
-  const char* const cmd = args[0];
-  if (strcmp(kEnableTestCameraCmd, cmd) == 0) {
-    enableTestCameraCmd(in, err);
-  } else if (strcmp(kDisableTestCameraCmd, cmd) == 0) {
-    disableTestCameraCmd(in);
-  } else {
-    dprintf(out, kShellCmdHelp);
+
+  std::variant<CommandWithOptions, std::string> cmdOrErrorMessage =
+      parseCommand(args, numArgs);
+  if (std::holds_alternative<std::string>(cmdOrErrorMessage)) {
+    dprintf(err, "Error: %s\n",
+            std::get<std::string>(cmdOrErrorMessage).c_str());
+    return STATUS_BAD_VALUE;
   }
 
+  const CommandWithOptions& cmd =
+      std::get<CommandWithOptions>(cmdOrErrorMessage);
+  binder_status_t status = STATUS_OK;
+  switch (cmd.command) {
+    case Command::HELP:
+      dprintf(out, kShellCmdHelp);
+      break;
+    case Command::ENABLE_TEST_CAMERA:
+      status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
+      break;
+    case Command::DISABLE_TEST_CAMERA:
+      disableTestCameraCmd(out);
+      break;
+  }
+
+  fsync(err);
   fsync(out);
-  return STATUS_OK;
+  return status;
 }
 
-void VirtualCameraService::enableTestCameraCmd(const int out, const int err) {
+binder_status_t VirtualCameraService::enableTestCameraCmd(
+    const int out, const int err,
+    const std::map<std::string, std::string>& options) {
   if (mTestCameraToken != nullptr) {
-    dprintf(out, "Test camera is already enabled (%s).",
+    dprintf(out, "Test camera is already enabled (%s).\n",
             getCamera(mTestCameraToken)->getCameraName().c_str());
-    return;
+    return STATUS_OK;
+  }
+
+  std::optional<int> cameraId;
+  auto it = options.find("camera_id");
+  if (it != options.end()) {
+    cameraId = parseInt(it->second);
+    if (!cameraId.has_value()) {
+      dprintf(err, "Invalid camera_id: %s\n, must be number > 0",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
+  std::optional<LensFacing> lensFacing;
+  it = options.find("lens_facing");
+  if (it != options.end()) {
+    lensFacing = parseLensFacing(it->second);
+    if (!lensFacing.has_value()) {
+      dprintf(err, "Invalid lens_facing: %s\n, must be front|back|external",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
   }
 
   sp<BBinder> token = sp<BBinder>::make();
@@ -242,15 +417,20 @@
 
   bool ret;
   VirtualCameraConfiguration configuration;
-  configuration.supportedStreamConfigs.push_back(
-      {.width = kVgaWidth, .height = kVgaHeight, Format::YUV_420_888});
-  registerCamera(mTestCameraToken, configuration, &ret);
+  configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
+                                                  .height = kVgaHeight,
+                                                  Format::YUV_420_888,
+                                                  .maxFps = kMaxFps});
+  configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
+  registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
+                 kDefaultDeviceId, &ret);
   if (ret) {
-    dprintf(out, "Successfully registered test camera %s",
+    dprintf(out, "Successfully registered test camera %s\n",
             getCamera(mTestCameraToken)->getCameraName().c_str());
   } else {
-    dprintf(err, "Failed to create test camera");
+    dprintf(err, "Failed to create test camera\n");
   }
+  return STATUS_OK;
 }
 
 void VirtualCameraService::disableTestCameraCmd(const int out) {
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index d573986..2121d64 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -43,7 +43,14 @@
       const ::ndk::SpAIBinder& token,
       const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
           configuration,
-      bool* _aidl_return) override EXCLUDES(mLock);
+      int32_t deviceId, bool* _aidl_return) override EXCLUDES(mLock);
+
+  // Registers camera corresponding to the binder token, using the given camera id.
+  ndk::ScopedAStatus registerCamera(
+      const ::ndk::SpAIBinder& token,
+      const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
+          configuration,
+      int cameraId, int32_t deviceId, bool* _aidl_return) EXCLUDES(mLock);
 
   // Unregisters camera corresponding to the binder token.
   ndk::ScopedAStatus unregisterCamera(const ::ndk::SpAIBinder& token) override
@@ -62,14 +69,21 @@
   binder_status_t handleShellCommand(int in, int out, int err, const char** args,
                                      uint32_t numArgs) override;
 
+  // Do not verify the presence of required EGL extensions when registering a
+  // virtual camera. Only to be used by unit tests.
+  void disableEglVerificationForTest() {
+    mVerifyEglExtensions = false;
+  }
+
  private:
   // Create and enable test camera instance if there's none.
-  void enableTestCameraCmd(int out, int err);
+  binder_status_t enableTestCameraCmd(
+      int out, int err, const std::map<std::string, std::string>& options);
   // Disable and destroy test camera instance if there's one.
   void disableTestCameraCmd(int out);
 
   std::shared_ptr<VirtualCameraProvider> mVirtualCameraProvider;
-
+  bool mVerifyEglExtensions = true;
   const PermissionsProxy& mPermissionProxy;
 
   std::mutex mLock;
@@ -84,6 +98,9 @@
 
   // Local binder token for test camera instance, or nullptr if there's none.
   ::ndk::SpAIBinder mTestCameraToken;
+
+  // Numerical id to assign to next created camera.
+  static std::atomic_int sNextId;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 47780d8..28fa495 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -18,14 +18,17 @@
 #define LOG_TAG "VirtualCameraSession"
 #include "VirtualCameraSession.h"
 
+#include <algorithm>
 #include <atomic>
 #include <chrono>
+#include <cmath>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
 #include <map>
 #include <memory>
 #include <mutex>
+#include <numeric>
 #include <optional>
 #include <tuple>
 #include <unordered_set>
@@ -37,13 +40,17 @@
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraRenderThread.h"
 #include "VirtualCameraStream.h"
+#include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BufferCache.h"
 #include "aidl/android/hardware/camera/device/BufferStatus.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/CaptureRequest.h"
 #include "aidl/android/hardware/camera/device/HalStream.h"
 #include "aidl/android/hardware/camera/device/NotifyMsg.h"
+#include "aidl/android/hardware/camera/device/RequestTemplate.h"
 #include "aidl/android/hardware/camera/device/ShutterMsg.h"
+#include "aidl/android/hardware/camera/device/Stream.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
 #include "aidl/android/hardware/camera/device/StreamRotation.h"
@@ -58,7 +65,7 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 #include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
@@ -68,6 +75,7 @@
 
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
+using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BufferCache;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
@@ -96,36 +104,75 @@
 
 // Size of request/result metadata fast message queue.
 // Setting to 0 to always disables FMQ.
-static constexpr size_t kMetadataMsgQueueSize = 0;
+constexpr size_t kMetadataMsgQueueSize = 0;
 
 // Maximum number of buffers to use per single stream.
-static constexpr size_t kMaxStreamBuffers = 2;
+constexpr size_t kMaxStreamBuffers = 2;
 
-CameraMetadata createDefaultRequestSettings(RequestTemplate type) {
-  hardware::camera::common::V1_0::helper::CameraMetadata metadataHelper;
+// Thumbnail size (0,0) corresponds to disabling the thumbnail.
+const Resolution kDefaultJpegThumbnailSize(0, 0);
 
-  camera_metadata_enum_android_control_capture_intent_t intent =
-      ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+camera_metadata_enum_android_control_capture_intent_t requestTemplateToIntent(
+    const RequestTemplate type) {
   switch (type) {
     case RequestTemplate::PREVIEW:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
     case RequestTemplate::STILL_CAPTURE:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
     case RequestTemplate::VIDEO_RECORD:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
     case RequestTemplate::VIDEO_SNAPSHOT:
-      intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
-      break;
+      return ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
     default:
-      // Leave default.
-      break;
+      // Return PREVIEW by default
+      return ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   }
+}
 
-  auto metadata = MetadataBuilder().setControlCaptureIntent(intent).build();
-  return (metadata != nullptr) ? std::move(*metadata) : CameraMetadata();
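+// Returns the highest maxFps across all supported input configurations.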
+int getMaxFps(const std::vector<SupportedStreamConfiguration>& configs) {
+  return std::transform_reduce(
+      configs.begin(), configs.end(), 0,
+      [](const int a, const int b) { return std::max(a, b); },
+      [](const SupportedStreamConfiguration& config) { return config.maxFps; });
+}
+
+CameraMetadata createDefaultRequestSettings(
+    const RequestTemplate type,
+    const std::vector<SupportedStreamConfiguration>& inputConfigs) {
+  int maxFps = getMaxFps(inputConfigs);
+  auto metadata =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlCaptureIntent(requestTemplateToIntent(type))
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAeExposureCompensation(0)
+          .setControlAeTargetFpsRange(FpsRange{maxFps, maxFps})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
+          .setControlAePrecaptureTrigger(
+              ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setJpegQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailQuality(VirtualCameraDevice::kDefaultJpegQuality)
+          .setJpegThumbnailSize(0, 0)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .build();
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to construct metadata for default request type %s",
+          __func__, toString(type).c_str());
+    return CameraMetadata();
+  } else {
+    ALOGV("%s: Successfully created metadata for request type %s", __func__,
+          toString(type).c_str());
+  }
+  return *metadata;
 }
 
 HalStream getHalStream(const Stream& stream) {
@@ -150,6 +197,78 @@
   return halStream;
 }
 
+Stream getHighestResolutionStream(const std::vector<Stream>& streams) {
+  return *(std::max_element(streams.begin(), streams.end(),
+                            [](const Stream& a, const Stream& b) {
+                              return a.width * a.height < b.width * b.height;
+                            }));
+}
+
+Resolution resolutionFromStream(const Stream& stream) {
+  return Resolution(stream.width, stream.height);
+}
+
+Resolution resolutionFromInputConfig(
+    const SupportedStreamConfiguration& inputConfig) {
+  return Resolution(inputConfig.width, inputConfig.height);
+}
+
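+// Picks the supported input configuration that best covers all requested
+// output streams, or std::nullopt when no configuration has a high enough
+// resolution with a matching aspect ratio.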
+std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
+    const std::vector<Stream>& requestedStreams,
+    const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+  Stream maxResolutionStream = getHighestResolutionStream(requestedStreams);
+  Resolution maxResolution = resolutionFromStream(maxResolutionStream);
+
+  // Find the best fitting input configuration to satisfy all requested
+  // streams. Best fitting => same or higher resolution than the largest
+  // requested stream, with the lowest pixel count difference and the same
+  // aspect ratio.
+  auto isBetterInputConfig = [maxResolution](
+                                 const SupportedStreamConfiguration& configA,
+                                 const SupportedStreamConfiguration& configB) {
+    int maxResPixelCount = maxResolution.width * maxResolution.height;
+    int pixelCountDiffA =
+        std::abs((configA.width * configA.height) - maxResPixelCount);
+    int pixelCountDiffB =
+        std::abs((configB.width * configB.height) - maxResPixelCount);
+
+    return pixelCountDiffA < pixelCountDiffB;
+  };
+
+  std::optional<SupportedStreamConfiguration> bestConfig;
+  for (const SupportedStreamConfiguration& inputConfig : supportedInputConfigs) {
+    Resolution inputConfigResolution = resolutionFromInputConfig(inputConfig);
+    if (inputConfigResolution < maxResolution ||
+        !isApproximatellySameAspectRatio(inputConfigResolution, maxResolution)) {
+      // We don't want to upscale from a lower resolution or use a different
+      // aspect ratio, so skip this configuration.
+      continue;
+    }
+
+    if (!bestConfig.has_value() ||
+        isBetterInputConfig(inputConfig, bestConfig.value())) {
+      bestConfig = inputConfig;
+    }
+  }
+
+  return bestConfig;
+}
+
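+// Extracts per-request settings from the capture request metadata, falling
+// back to the device defaults for keys the client did not set.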
+RequestSettings createSettingsFromMetadata(const CameraMetadata& metadata) {
+  return RequestSettings{
+      .jpegQuality = getJpegQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .jpegOrientation = getJpegOrientation(metadata),
+      .thumbnailResolution =
+          getJpegThumbnailSize(metadata).value_or(Resolution(0, 0)),
+      .thumbnailJpegQuality = getJpegThumbnailQuality(metadata).value_or(
+          VirtualCameraDevice::kDefaultJpegQuality),
+      .fpsRange = getFpsRange(metadata),
+      .captureIntent = getCaptureIntent(metadata).value_or(
+          ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
+      .gpsCoordinates = getGpsCoordinates(metadata),
+      .aePrecaptureTrigger = getPrecaptureTrigger(metadata)};
+}
+
 }  // namespace
 
 VirtualCameraSession::VirtualCameraSession(
@@ -215,15 +334,13 @@
   halStreams.clear();
   halStreams.resize(in_requestedConfiguration.streams.size());
 
-  sp<Surface> inputSurface = nullptr;
-  int inputWidth;
-  int inputHeight;
-
   if (!virtualCamera->isStreamCombinationSupported(in_requestedConfiguration)) {
     ALOGE("%s: Requested stream configuration is not supported", __func__);
     return cameraStatus(Status::ILLEGAL_ARGUMENT);
   }
 
+  sp<Surface> inputSurface = nullptr;
+  std::optional<SupportedStreamConfiguration> inputConfig;
   {
     std::lock_guard<std::mutex> lock(mLock);
     for (int i = 0; i < in_requestedConfiguration.streams.size(); ++i) {
@@ -233,13 +350,21 @@
       }
     }
 
-    inputWidth = streams[0].width;
-    inputHeight = streams[0].height;
+    inputConfig = pickInputConfigurationForStreams(
+        streams, virtualCamera->getInputConfigs());
+    if (!inputConfig.has_value()) {
+      ALOGE(
+          "%s: Failed to pick any input configuration for stream configuration "
+          "request: %s",
+          __func__, in_requestedConfiguration.toString().c_str());
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
     if (mRenderThread == nullptr) {
       // If there's no client callback, start camera in test mode.
       const bool testMode = mVirtualCameraClientCallback == nullptr;
       mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-          mSessionContext, inputWidth, inputHeight, mCameraDeviceCallback,
+          mSessionContext, resolutionFromInputConfig(*inputConfig),
+          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
           testMode);
       mRenderThread->start();
       inputSurface = mRenderThread->getInputSurface();
@@ -252,10 +377,9 @@
     // create single texture.
     mVirtualCameraClientCallback->onStreamConfigured(
         /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
-        inputWidth, inputHeight, Format::YUV_420_888);
+        inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
   }
 
-  mFirstRequest.store(true);
   return ndk::ScopedAStatus::ok();
 }
 
@@ -263,12 +387,22 @@
     RequestTemplate in_type, CameraMetadata* _aidl_return) {
   ALOGV("%s: type %d", __func__, static_cast<int32_t>(in_type));
 
+  std::shared_ptr<VirtualCameraDevice> camera = mCameraDevice.lock();
+  if (camera == nullptr) {
+    ALOGW(
+        "%s: constructDefaultRequestSettings called on already unregistered "
+        "camera",
+        __func__);
+    return cameraStatus(Status::CAMERA_DISCONNECTED);
+  }
+
   switch (in_type) {
     case RequestTemplate::PREVIEW:
     case RequestTemplate::STILL_CAPTURE:
     case RequestTemplate::VIDEO_RECORD:
     case RequestTemplate::VIDEO_SNAPSHOT: {
-      *_aidl_return = createDefaultRequestSettings(in_type);
+      *_aidl_return =
+          createDefaultRequestSettings(in_type, camera->getInputConfigs());
       return ndk::ScopedAStatus::ok();
     }
     case RequestTemplate::MANUAL:
@@ -385,15 +519,27 @@
 
 ndk::ScopedAStatus VirtualCameraSession::processCaptureRequest(
     const CaptureRequest& request) {
-  ALOGD("%s: request: %s", __func__, request.toString().c_str());
-
-  if (mFirstRequest.exchange(false) && request.settings.metadata.empty()) {
-    return cameraStatus(Status::ILLEGAL_ARGUMENT);
-  }
+  ALOGV("%s: request: %s", __func__, request.toString().c_str());
 
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
+  RequestSettings requestSettings;
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    // If the metadata is empty, the last received metadata applies; if it's
+    // non-empty, update it.
+    if (!request.settings.metadata.empty()) {
+      mCurrentRequestMetadata = request.settings;
+    }
+
+    // We don't have any metadata for this request - this means we received
+    // none in the first request, which is an error state.
+    if (mCurrentRequestMetadata.metadata.empty()) {
+      return cameraStatus(Status::ILLEGAL_ARGUMENT);
+    }
+
+    requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
+
     cameraCallback = mCameraDeviceCallback;
   }
 
@@ -427,7 +573,7 @@
       return cameraStatus(Status::INTERNAL_ERROR);
     }
     mRenderThread->enqueueTask(std::make_unique<ProcessCaptureRequestTask>(
-        request.frameNumber, taskBuffers));
+        request.frameNumber, taskBuffers, requestSettings));
   }
 
   if (mVirtualCameraClientCallback != nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 82a7a34..556314f 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -25,6 +25,7 @@
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/companion/virtualcamera/IVirtualCameraCallback.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceSession.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/ICameraDeviceCallback.h"
 #include "utils/Mutex.h"
 
@@ -138,7 +139,8 @@
       int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>;
   std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
 
-  std::atomic_bool mFirstRequest{true};
+  aidl::android::hardware::camera::device::CameraMetadata mCurrentRequestMetadata
+      GUARDED_BY(mLock);
 
   std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
 };
diff --git a/services/camera/virtualcamera/aidl/Android.bp b/services/camera/virtualcamera/aidl/Android.bp
index fdeb7f2..b3fe3ad 100644
--- a/services/camera/virtualcamera/aidl/Android.bp
+++ b/services/camera/virtualcamera/aidl/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -8,9 +9,11 @@
     unstable: true,
     srcs: [
         "android/companion/virtualcamera/Format.aidl",
+        "android/companion/virtualcamera/LensFacing.aidl",
         "android/companion/virtualcamera/IVirtualCameraCallback.aidl",
         "android/companion/virtualcamera/IVirtualCameraService.aidl",
         "android/companion/virtualcamera/VirtualCameraConfiguration.aidl",
+        "android/companion/virtualcamera/SensorOrientation.aidl",
         "android/companion/virtualcamera/SupportedStreamConfiguration.aidl",
     ],
     local_include_dir: ".",
@@ -34,6 +37,6 @@
         java: {
             enabled: true,
             platform_apis: true,
-        }
+        },
     },
 }
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
index cbe03e9..f5a84f7 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraCallback.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -36,7 +36,8 @@
      * @param height - height of the surface.
      * @param pixelFormat - pixel format of the surface.
      */
-    void onStreamConfigured(int streamId, in Surface surface, int width, int height, in Format pixelFormat);
+    void onStreamConfigured(int streamId, in Surface surface, int width, int height,
+            in Format pixelFormat);
 
     /**
      * Called when framework requests capture. This can be used by the client as a hint
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
index bb74f5c..1bd99be 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
@@ -28,7 +28,8 @@
      * Registers a new camera with the virtual camera hal.
      * @return true if the camera was successfully registered
      */
-    boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration);
+    boolean registerCamera(in IBinder token, in VirtualCameraConfiguration configuration,
+            int deviceId);
 
     /**
      * Unregisters the camera from the virtual camera hal. After this call the virtual camera won't
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl
similarity index 64%
copy from services/camera/libcameraservice/utils/CameraThreadState.h
copy to services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl
index e1a70de..8568c91 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/LensFacing.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,16 @@
  * limitations under the License.
  */
 
-#include <stdint.h>
+package android.companion.virtualcamera;
 
-namespace android {
-class CameraThreadState {
-public:
-  static int64_t clearCallingIdentity();
-
-  static void restoreCallingIdentity(int64_t token);
-
-  static int getCallingUid();
-
-  static int getCallingPid();
-};
-
-} // android
+/**
+ * Direction that the virtual camera faces relative to the device's screen.
+ *
+ * @hide
+ */
+@Backing(type="int")
+enum LensFacing {
+    FRONT = 0,
+    BACK = 1,
+    EXTERNAL = 2,
+}
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl
similarity index 64%
rename from services/camera/libcameraservice/utils/CameraThreadState.h
rename to services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl
index e1a70de..ef91f00 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SensorOrientation.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,17 @@
  * limitations under the License.
  */
 
-#include <stdint.h>
+package android.companion.virtualcamera;
 
-namespace android {
-class CameraThreadState {
-public:
-  static int64_t clearCallingIdentity();
-
-  static void restoreCallingIdentity(int64_t token);
-
-  static int getCallingUid();
-
-  static int getCallingPid();
-};
-
-} // android
+/**
+ * Sensor orientation of a virtual camera.
+ *
+ * @hide
+ */
+@Backing(type="int")
+enum SensorOrientation {
+    ORIENTATION_0 = 0,
+    ORIENTATION_90 = 90,
+    ORIENTATION_180 = 180,
+    ORIENTATION_270 = 270,
+}
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
index 7070cbd..6f86cbe 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/SupportedStreamConfiguration.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.companion.virtualcamera;
 
 import android.companion.virtualcamera.Format;
@@ -26,4 +27,5 @@
     int width;
     int height;
     Format pixelFormat = Format.UNKNOWN;
+    int maxFps;
 }
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
index c1a2f22..887ad26 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/VirtualCameraConfiguration.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,9 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package android.companion.virtualcamera;
 
 import android.companion.virtualcamera.IVirtualCameraCallback;
+import android.companion.virtualcamera.LensFacing;
+import android.companion.virtualcamera.SensorOrientation;
 import android.companion.virtualcamera.SupportedStreamConfiguration;
 
 /**
@@ -26,4 +29,6 @@
 parcelable VirtualCameraConfiguration {
     SupportedStreamConfiguration[] supportedStreamConfigs;
     IVirtualCameraCallback virtualCameraCallback;
+    SensorOrientation sensorOrientation = SensorOrientation.ORIENTATION_0;
+    LensFacing lensFacing;
 }
diff --git a/services/camera/virtualcamera/flags/Android.bp b/services/camera/virtualcamera/flags/Android.bp
new file mode 100644
index 0000000..5fa8852
--- /dev/null
+++ b/services/camera/virtualcamera/flags/Android.bp
@@ -0,0 +1,37 @@
+package {
+    default_team: "trendy_team_xr_framework",
+}
+
+soong_config_module_type {
+    name: "virtual_device_build_flags_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "vdm",
+    bool_variables: [
+        "virtual_camera_service_enabled",
+    ],
+    properties: [
+        "cflags",
+    ],
+}
+
+soong_config_bool_variable {
+    name: "virtual_camera_service_enabled",
+}
+
+virtual_device_build_flags_cc_defaults {
+    name: "virtual_device_build_flags_defaults",
+    soong_config_variables: {
+        virtual_camera_service_enabled: {
+            cflags: ["-DVIRTUAL_CAMERA_SERVICE_ENABLED=1"],
+        },
+    },
+}
+
+cc_library_static {
+    name: "libvirtualdevicebuildflags",
+    srcs: [
+        "android_companion_virtualdevice_build_flags.cc",
+    ],
+    export_include_dirs: ["."],
+    defaults: ["virtual_device_build_flags_defaults"],
+}
diff --git a/services/camera/libcameraservice/utils/CameraThreadState.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
similarity index 62%
copy from services/camera/libcameraservice/utils/CameraThreadState.h
copy to services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
index e1a70de..5525bc9 100644
--- a/services/camera/libcameraservice/utils/CameraThreadState.h
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,20 @@
  * limitations under the License.
  */
 
-#include <stdint.h>
-
 namespace android {
-class CameraThreadState {
-public:
-  static int64_t clearCallingIdentity();
+namespace companion {
+namespace virtualdevice {
+namespace flags {
 
-  static void restoreCallingIdentity(int64_t token);
+bool virtual_camera_service_build_flag() {
+#if VIRTUAL_CAMERA_SERVICE_ENABLED
+  return true;
+#else
+  return false;
+#endif
+}
 
-  static int getCallingUid();
-
-  static int getCallingPid();
-};
-
-} // android
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
new file mode 100644
index 0000000..718ce9b
--- /dev/null
+++ b/services/camera/virtualcamera/flags/android_companion_virtualdevice_build_flags.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace android {
+namespace companion {
+namespace virtualdevice {
+namespace flags {
+
+// Returns true if the virtual camera service is enabled
+// in the build.
+//
+// TODO(b/309090563) - Deprecate in favor of an autogenerated library to query
+// build flags once available.
+bool virtual_camera_service_build_flag();
+
+}  // namespace flags
+}  // namespace virtualdevice
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/fuzzer/Android.bp b/services/camera/virtualcamera/fuzzer/Android.bp
index 71e8f50..6a72167 100644
--- a/services/camera/virtualcamera/fuzzer/Android.bp
+++ b/services/camera/virtualcamera/fuzzer/Android.bp
@@ -15,7 +15,8 @@
  * limitations under the License.
  *
  *****************************************************************************/
- package {
+package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
diff --git a/services/camera/virtualcamera/tests/Android.bp b/services/camera/virtualcamera/tests/Android.bp
index bc46ba0..543cc10 100644
--- a/services/camera/virtualcamera/tests/Android.bp
+++ b/services/camera/virtualcamera/tests/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_xr_framework",
     // See: http://go/android-license-faq
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -14,11 +15,14 @@
         "libgtest",
         "libgmock",
     ],
-    srcs: ["EglUtilTest.cc",
-           "VirtualCameraDeviceTest.cc",
-           "VirtualCameraProviderTest.cc",
-           "VirtualCameraRenderThreadTest.cc",
-           "VirtualCameraServiceTest.cc",
-           "VirtualCameraSessionTest.cc"],
+    srcs: [
+        "EglUtilTest.cc",
+        "JpegUtilTest.cc",
+        "VirtualCameraDeviceTest.cc",
+        "VirtualCameraProviderTest.cc",
+        "VirtualCameraRenderThreadTest.cc",
+        "VirtualCameraServiceTest.cc",
+        "VirtualCameraSessionTest.cc",
+    ],
     test_suites: ["device-tests"],
 }
diff --git a/services/camera/virtualcamera/tests/JpegUtilTest.cc b/services/camera/virtualcamera/tests/JpegUtilTest.cc
new file mode 100644
index 0000000..e6481f0
--- /dev/null
+++ b/services/camera/virtualcamera/tests/JpegUtilTest.cc
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+
+#include "system/graphics.h"
+#define LOG_TAG "JpegUtilTest"
+
+#include <array>
+#include <cstdint>
+#include <cstring>
+
+#include "android/hardware_buffer.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "jpeglib.h"
+#include "util/JpegUtil.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+namespace {
+
+using testing::Eq;
+using testing::Gt;
+using testing::Optional;
+using testing::VariantWith;
+
+constexpr int kOutputBufferSize = 1024 * 1024;  // 1 MiB.
+constexpr int kJpegQuality = 80;
+
+// Create a black YUV420 buffer for testing purposes.
+std::shared_ptr<AHardwareBuffer> createHardwareBufferForTest(const int width,
+                                                             const int height) {
+  const AHardwareBuffer_Desc desc{.width = static_cast<uint32_t>(width),
+                                  .height = static_cast<uint32_t>(height),
+                                  .layers = 1,
+                                  .format = AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+                                  .usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+                                  .stride = 0,
+                                  .rfu0 = 0,
+                                  .rfu1 = 0};
+
+  AHardwareBuffer* hwBufferPtr;
+  int status = AHardwareBuffer_allocate(&desc, &hwBufferPtr);
+  if (status != NO_ERROR) {
+    ALOGE(
+        "%s: Failed to allocate hardware buffer for temporary framebuffer: %d",
+        __func__, status);
+    return nullptr;
+  }
+
+  std::shared_ptr<AHardwareBuffer> hwBuffer(hwBufferPtr,
+                                            AHardwareBuffer_release);
+
+  YCbCrLockGuard yCbCrLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN);
+  const android_ycbcr& ycbr = (*yCbCrLock);
+
+  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+  for (int r = 0; r < height; r++) {
+    memset(y + r * ycbr.ystride, 0x00, width);
+  }
+
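+  // Fill the chroma planes with the mid-scale value (0xff / 2); together with
+  // Y = 0 this keeps the test frame a uniform black.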
+  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
+  for (int r = 0; r < height / 2; r++) {
+    for (int c = 0; c < width / 2; c++) {
+      cb[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+      cr[r * ycbr.cstride + c * ycbr.chroma_step] = 0xff / 2;
+    }
+  }
+
+  return hwBuffer;
+}
+
+// Decode the JPEG header; return the image resolution on success or an error
+// message on failure.
+std::variant<std::string, Resolution> verifyHeaderAndGetResolution(
+    const uint8_t* data, int size) {
+  struct jpeg_decompress_struct ctx;
+  struct jpeg_error_mgr jerr;
+
+  struct DecompressionError {
+    bool success = true;
+    std::string error;
+  } result;
+
+  ctx.client_data = &result;
+
+  ctx.err = jpeg_std_error(&jerr);
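+  // libjpeg's default error handlers print to stderr and terminate the
+  // process; override them so decode failures are recorded in
+  // DecompressionError and can be inspected by the test.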
+  ctx.err->error_exit = [](j_common_ptr cinfo) {
+    reinterpret_cast<DecompressionError*>(cinfo->client_data)->success = false;
+  };
+  ctx.err->output_message = [](j_common_ptr cinfo) {
+    char buffer[JMSG_LENGTH_MAX];
+    (*cinfo->err->format_message)(cinfo, buffer);
+    reinterpret_cast<DecompressionError*>(cinfo->client_data)->error = buffer;
+    ALOGE("libjpeg error: %s", buffer);
+  };
+
+  jpeg_create_decompress(&ctx);
+  jpeg_mem_src(&ctx, data, size);
+  jpeg_read_header(&ctx, /*require_image=*/true);
+
+  if (!result.success) {
+    jpeg_destroy_decompress(&ctx);
+    return result.error;
+  }
+
+  Resolution resolution(ctx.image_width, ctx.image_height);
+  jpeg_destroy_decompress(&ctx);
+  return resolution;
+}
+
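+// roundTo2DctSize rounds each dimension up to the next multiple of 16
+// (twice the 8x8 DCT block size used for YUV420 JPEG encoding).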
+TEST(JpegUtil, roundToDctSize) {
+  EXPECT_THAT(roundTo2DctSize(Resolution(640, 480)), Eq(Resolution(640, 480)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(5, 5)), Eq(Resolution(16, 16)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(32, 32)), Eq(Resolution(32, 32)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(33, 32)), Eq(Resolution(48, 32)));
+  EXPECT_THAT(roundTo2DctSize(Resolution(32, 33)), Eq(Resolution(32, 48)));
+}
+
+class JpegUtilTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    std::fill(mOutputBuffer.begin(), mOutputBuffer.end(), 0);
+  }
+
+ protected:
+  std::optional<size_t> compress(int imageWidth, int imageHeight,
+                                 std::shared_ptr<AHardwareBuffer> inBuffer) {
+    return compressJpeg(imageWidth, imageHeight, kJpegQuality, inBuffer,
+                        /*app1ExifData=*/{}, mOutputBuffer.size(),
+                        mOutputBuffer.data());
+  }
+
+  std::array<uint8_t, kOutputBufferSize> mOutputBuffer;
+};
+
+TEST_F(JpegUtilTest, compressImageSizeAlignedWithDctSucceeds) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(640, 480);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Optional(Gt(0)));
+  EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+                                           compressedSize.value()),
+              VariantWith<Resolution>(Resolution(640, 480)));
+}
+
+TEST_F(JpegUtilTest, compressImageSizeNotAlignedWidthDctSucceeds) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(640, 480);
+
+  std::optional<size_t> compressedSize = compress(630, 470, inBuffer);
+
+  EXPECT_THAT(compressedSize, Optional(Gt(0)));
+  EXPECT_THAT(verifyHeaderAndGetResolution(mOutputBuffer.data(),
+                                           compressedSize.value()),
+              VariantWith<Resolution>(Resolution(630, 470)));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferNotAlignedWithDctFails) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(641, 480);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+TEST_F(JpegUtilTest, compressImageWithBufferTooSmallFails) {
+  std::shared_ptr<AHardwareBuffer> inBuffer =
+      createHardwareBufferForTest(634, 464);
+
+  std::optional<size_t> compressedSize = compress(640, 480, inBuffer);
+
+  EXPECT_THAT(compressedSize, Eq(std::nullopt));
+}
+
+}  // namespace
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 140ae65..5c9b3b9 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,18 +14,24 @@
  * limitations under the License.
  */
 
+#include <algorithm>
+#include <iterator>
 #include <memory>
 
 #include "VirtualCameraDevice.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
+#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
 #include "log/log_main.h"
 #include "system/camera_metadata.h"
+#include "util/MetadataUtil.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -34,27 +40,52 @@
 namespace {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::device::Stream;
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
 using ::aidl::android::hardware::camera::device::StreamType;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::testing::ElementsAre;
 using ::testing::UnorderedElementsAreArray;
 using metadata_stream_t =
     camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
 constexpr int kCameraId = 42;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int k360pWidth = 640;
+constexpr int k360pHeight = 360;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kHdWidth = 1280;
 constexpr int kHdHeight = 720;
+constexpr int kMaxFps = 30;
+constexpr int kDefaultDeviceId = 0;
+
+const Stream kVgaYUV420Stream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::YCBCR_420_888,
+};
+
+const Stream kVgaJpegStream = Stream{
+    .streamType = StreamType::OUTPUT,
+    .width = kVgaWidth,
+    .height = kVgaHeight,
+    .format = PixelFormat::BLOB,
+};
 
 struct AvailableStreamConfiguration {
   const int width;
   const int height;
   const int pixelFormat;
-  const metadata_stream_t streamConfiguration;
+  const metadata_stream_t streamConfiguration =
+      ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
 };
 
 bool operator==(const AvailableStreamConfiguration& a,
@@ -96,18 +127,19 @@
 }
 
 struct VirtualCameraConfigTestParam {
-  std::vector<SupportedStreamConfiguration> inputConfig;
+  VirtualCameraConfiguration inputConfig;
   std::vector<AvailableStreamConfiguration> expectedAvailableStreamConfigs;
 };
 
-class VirtualCameraDeviceTest
+class VirtualCameraDeviceCharacterisicsTest
     : public testing::TestWithParam<VirtualCameraConfigTestParam> {};
 
-TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
+TEST_P(VirtualCameraDeviceCharacterisicsTest,
+       cameraCharacteristicsForInputFormat) {
   const VirtualCameraConfigTestParam& param = GetParam();
   std::shared_ptr<VirtualCameraDevice> camera =
       ndk::SharedRefBase::make<VirtualCameraDevice>(
-          kCameraId, param.inputConfig, /*virtualCameraClientCallback=*/nullptr);
+          kCameraId, param.inputConfig, kDefaultDeviceId);
 
   CameraMetadata metadata;
   ASSERT_TRUE(camera->getCameraCharacteristics(&metadata).isOk());
@@ -132,81 +164,204 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(
-    cameraCharacteristicsForInputFormat, VirtualCameraDeviceTest,
+    cameraCharacteristicsForInputFormat, VirtualCameraDeviceCharacterisicsTest,
     testing::Values(
         VirtualCameraConfigTestParam{
-            .inputConfig = {SupportedStreamConfiguration{
-                .width = kVgaWidth,
-                .height = kVgaHeight,
-                .pixelFormat = Format::YUV_420_888}},
+            .inputConfig =
+                VirtualCameraConfiguration{
+                    .supportedStreamConfigs = {SupportedStreamConfiguration{
+                        .width = kVgaWidth,
+                        .height = kVgaHeight,
+                        .pixelFormat = Format::YUV_420_888,
+                        .maxFps = kMaxFps}},
+                    .virtualCameraCallback = nullptr,
+                    .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                    .lensFacing = LensFacing::FRONT},
             .expectedAvailableStreamConfigs =
                 {AvailableStreamConfiguration{
-                     .width = kVgaWidth,
-                     .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kQvgaWidth,
+                     .height = kQvgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
                      .pixelFormat =
-                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                         ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                  AvailableStreamConfiguration{
                      .width = kVgaWidth,
                      .height = kVgaHeight,
-                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                     .streamConfiguration =
-                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}},
+                     .pixelFormat =
+                         ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                 AvailableStreamConfiguration{
+                     .width = kVgaWidth,
+                     .height = kVgaHeight,
+                     .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}},
         VirtualCameraConfigTestParam{
-            .inputConfig = {SupportedStreamConfiguration{
-                                .width = kVgaWidth,
-                                .height = kVgaHeight,
-                                .pixelFormat = Format::YUV_420_888},
-                            SupportedStreamConfiguration{
-                                .width = kHdWidth,
-                                .height = kHdHeight,
-                                .pixelFormat = Format::YUV_420_888}},
+            .inputConfig =
+                VirtualCameraConfiguration{
+                    .supportedStreamConfigs =
+                        {SupportedStreamConfiguration{
+                             .width = kVgaWidth,
+                             .height = kVgaHeight,
+                             .pixelFormat = Format::YUV_420_888,
+                             .maxFps = kMaxFps},
+                         SupportedStreamConfiguration{
+                             .width = kHdWidth,
+                             .height = kHdHeight,
+                             .pixelFormat = Format::YUV_420_888,
+                             .maxFps = kMaxFps}},
+                    .virtualCameraCallback = nullptr,
+                    .sensorOrientation = SensorOrientation::ORIENTATION_0,
+                    .lensFacing = LensFacing::BACK},
             .expectedAvailableStreamConfigs = {
                 AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = kQvgaWidth,
+                    .height = kQvgaHeight,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = 640,
+                    .height = 360,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kVgaWidth,
                     .height = kVgaHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat =
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
+                AvailableStreamConfiguration{
+                    .width = 1024,
+                    .height = 576,
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
                     .pixelFormat =
-                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT},
+                        ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED},
                 AvailableStreamConfiguration{
                     .width = kHdWidth,
                     .height = kHdHeight,
-                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
-                    .streamConfiguration =
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));
+                    .pixelFormat = ANDROID_SCALER_AVAILABLE_FORMATS_BLOB}}}));
+
+class VirtualCameraDeviceTest : public ::testing::Test {
+ public:
+  void SetUp() override {
+    mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = {SupportedStreamConfiguration{
+                .width = kVgaWidth,
+                .height = kVgaHeight,
+                .pixelFormat = Format::YUV_420_888,
+                .maxFps = kMaxFps}},
+            .virtualCameraCallback = nullptr,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mCamera;
+};
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfNonStallStreamsSuceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyNonStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfProcessedStreams + 1,
+              kVgaYUV420Stream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfStallStreamsSuceeds) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_TRUE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, configureTooManyStallStreamsFails) {
+  StreamConfiguration config;
+  std::fill_n(std::back_insert_iterator(config.streams),
+              VirtualCameraDevice::kMaxNumberOfStallStreams + 1, kVgaJpegStream);
+
+  bool aidl_ret;
+  ASSERT_TRUE(mCamera->isStreamCombinationSupported(config, &aidl_ret).isOk());
+  EXPECT_FALSE(aidl_ret);
+}
+
+TEST_F(VirtualCameraDeviceTest, thumbnailSizeWithCompatibleAspectRatio) {
+  CameraMetadata metadata;
+  ASSERT_TRUE(mCamera->getCameraCharacteristics(&metadata).isOk());
+
+  // The camera is configured with VGA input, so we expect a 240 x 180
+  // thumbnail size in the characteristics, since it has the same aspect ratio.
+  EXPECT_THAT(getJpegAvailableThumbnailSizes(metadata),
+              ElementsAre(Resolution(0, 0), Resolution(240, 180)));
+}
 
 }  // namespace
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index 615a77c..f1b2a92 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -33,7 +33,10 @@
 namespace {
 
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::common::TorchModeStatus;
@@ -49,6 +52,9 @@
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 30;
+constexpr int kCameraId = 9999;
+constexpr int kDefaultDeviceId = 0;
 constexpr char kVirtualCameraNameRegex[] =
     "device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
 
@@ -79,10 +85,15 @@
   std::shared_ptr<VirtualCameraProvider> mCameraProvider;
   std::shared_ptr<MockCameraProviderCallback> mMockCameraProviderCallback =
       ndk::SharedRefBase::make<MockCameraProviderCallback>();
-  std::vector<SupportedStreamConfiguration> mInputConfigs = {
-      SupportedStreamConfiguration{.width = kVgaWidth,
-                                   .height = kVgaHeight,
-                                   .pixelFormat = Format::YUV_420_888}};
+  VirtualCameraConfiguration mInputConfig = VirtualCameraConfiguration{
+      .supportedStreamConfigs = {SupportedStreamConfiguration{
+          .width = kVgaWidth,
+          .height = kVgaHeight,
+          .pixelFormat = Format::YUV_420_888,
+          .maxFps = kMaxFps}},
+      .virtualCameraCallback = nullptr,
+      .sensorOrientation = SensorOrientation::ORIENTATION_0,
+      .lensFacing = LensFacing::FRONT};
 };
 
 TEST_F(VirtualCameraProviderTest, SetNullCameraCallbackFails) {
@@ -109,7 +120,7 @@
 
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
   EXPECT_THAT(camera, Not(IsNull()));
   EXPECT_THAT(camera->getCameraName(), MatchesRegex(kVirtualCameraNameRegex));
 
@@ -127,7 +138,7 @@
       .WillOnce(Return(ndk::ScopedAStatus::ok()));
 
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
 
   // Created camera should be in the list of cameras.
@@ -139,7 +150,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
 
   EXPECT_CALL(*mMockCameraProviderCallback,
               cameraDeviceStatusChange(Eq(camera->getCameraName()),
@@ -156,7 +167,7 @@
 TEST_F(VirtualCameraProviderTest, RemoveNonExistingCamera) {
   ASSERT_TRUE(mCameraProvider->setCallback(mMockCameraProviderCallback).isOk());
   std::shared_ptr<VirtualCameraDevice> camera =
-      mCameraProvider->createCamera(mInputConfigs);
+      mCameraProvider->createCamera(mInputConfig, kCameraId, kDefaultDeviceId);
 
   // Removing non-existing camera should fail.
   const std::string cameraName = "DefinitelyNoTCamera";
diff --git a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
index 5f899b8..ddcb789 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraRenderThreadTest.cc
@@ -33,6 +33,7 @@
 #include "android/binder_auto_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
@@ -62,6 +63,7 @@
 
 constexpr int kInputWidth = 640;
 constexpr int kInputHeight = 480;
+const Resolution kInputResolution(kInputWidth, kInputHeight);
 
 Matcher<StreamBuffer> IsStreamBufferWithStatus(const int streamId,
                                                const int bufferId,
@@ -102,7 +104,8 @@
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-        *mSessionContext, kInputWidth, kInputHeight, mMockCameraDeviceCallback);
+        *mSessionContext, kInputResolution,
+        /*reportedSensorSize*/ kInputResolution, mMockCameraDeviceCallback);
   }
 
  protected:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 38261fb..5927b05 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,8 +14,11 @@
  * limitations under the License.
  */
 
+#include <algorithm>
 #include <cstdio>
+#include <iterator>
 #include <memory>
+#include <regex>
 
 #include "VirtualCameraService.h"
 #include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
@@ -29,6 +32,7 @@
 #include "binder/Binder.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
+#include "util/MetadataUtil.h"
 #include "util/Permissions.h"
 #include "utils/Errors.h"
 
@@ -39,33 +43,48 @@
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
 using ::aidl::android::hardware::camera::common::TorchModeStatus;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
 using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
 using ::aidl::android::hardware::graphics::common::PixelFormat;
 using ::aidl::android::view::Surface;
 using ::testing::_;
+using ::testing::ElementsAre;
 using ::testing::Eq;
 using ::testing::Ge;
 using ::testing::IsEmpty;
 using ::testing::IsNull;
 using ::testing::Not;
+using ::testing::Optional;
 using ::testing::Return;
 using ::testing::SizeIs;
 
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
+constexpr int kMaxFps = 30;
+constexpr SensorOrientation kSensorOrientation =
+    SensorOrientation::ORIENTATION_0;
+constexpr LensFacing kLensFacing = LensFacing::FRONT;
+constexpr int kDefaultDeviceId = 0;
 constexpr char kCreateVirtualDevicePermissions[] =
     "android.permission.CREATE_VIRTUAL_DEVICE";
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
-                                               const Format format) {
+                                               const Format format,
+                                               const int maxFps) {
   VirtualCameraConfiguration configuration;
-  configuration.supportedStreamConfigs.push_back(
-      {.width = width, .height = height, .pixelFormat = format});
+  configuration.supportedStreamConfigs.push_back({.width = width,
+                                                  .height = height,
+                                                  .pixelFormat = format,
+                                                  .maxFps = maxFps});
+  configuration.sensorOrientation = kSensorOrientation;
+  configuration.lensFacing = kLensFacing;
   return configuration;
 }
 
@@ -99,6 +118,7 @@
     mCameraProvider->setCallback(mMockCameraProviderCallback);
     mCameraService = ndk::SharedRefBase::make<VirtualCameraService>(
         mCameraProvider, mMockPermissionsProxy);
+    mCameraService->disableEglVerificationForTest();
 
     ON_CALL(mMockPermissionsProxy, checkCallingPermission)
         .WillByDefault(Return(true));
@@ -113,8 +133,8 @@
     bool aidlRet;
 
     ASSERT_TRUE(mCameraService
-                    ->registerCamera(mNdkOwnerToken,
-                                     mVgaYUV420OnlyConfiguration, &aidlRet)
+                    ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
+                                     kDefaultDeviceId, &aidlRet)
                     .isOk());
     ASSERT_TRUE(aidlRet);
   }
@@ -123,12 +143,21 @@
     close(mDevNullFd);
   }
 
-  void execute_shell_command(const std::string cmd) {
-    std::array<const char*, 1> args{cmd.data()};
-    ASSERT_THAT(
-        mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
-                                           args.data(), args.size()),
-        Eq(NO_ERROR));
+  binder_status_t execute_shell_command(const std::string& cmd) {
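+    // Split the command on whitespace into argv-style tokens so options such
+    // as "--camera_id=12345" reach handleShellCommand as separate arguments.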
+    const static std::regex whitespaceRegex("\\s+");
+    std::vector<std::string> tokens;
+    std::copy_if(
+        std::sregex_token_iterator(cmd.begin(), cmd.end(), whitespaceRegex, -1),
+        std::sregex_token_iterator(), std::back_inserter(tokens),
+        [](const std::string& token) { return !token.empty(); });
+
+    std::vector<const char*> argv;
+    argv.reserve(tokens.size());
+    std::transform(tokens.begin(), tokens.end(), std::back_inserter(argv),
+                   [](const std::string& str) { return str.c_str(); });
+
+    return mCameraService->handleShellCommand(
+        mDevNullFd, mDevNullFd, mDevNullFd, argv.data(), argv.size());
   }
 
   std::vector<std::string> getCameraIds() {
@@ -137,6 +166,17 @@
     return cameraIds;
   }
 
+  std::optional<camera_metadata_enum_android_lens_facing> getCameraLensFacing(
+      const std::string& id) {
+    std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+    if (camera == nullptr) {
+      return std::nullopt;
+    }
+    CameraMetadata metadata;
+    camera->getCameraCharacteristics(&metadata);
+    return getLensFacing(metadata);
+  }
+
  protected:
   std::shared_ptr<VirtualCameraService> mCameraService;
   std::shared_ptr<VirtualCameraProvider> mCameraProvider;
@@ -150,7 +190,7 @@
   int mDevNullFd;
 
   VirtualCameraConfiguration mVgaYUV420OnlyConfiguration =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, kMaxFps);
 };
 
 TEST_F(VirtualCameraServiceTest, RegisterCameraWithYuvInputSucceeds) {
@@ -158,10 +198,10 @@
   ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
   bool aidlRet;
 
-  ASSERT_TRUE(
-      mCameraService
-          ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration, &aidlRet)
-          .isOk());
+  ASSERT_TRUE(mCameraService
+                  ->registerCamera(ndkToken, mVgaYUV420OnlyConfiguration,
+                                   kDefaultDeviceId, &aidlRet)
+                  .isOk());
 
   EXPECT_TRUE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -173,9 +213,11 @@
   bool aidlRet;
 
   VirtualCameraConfiguration config =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
 
-  ASSERT_TRUE(mCameraService->registerCamera(ndkToken, config, &aidlRet).isOk());
+  ASSERT_TRUE(mCameraService
+                  ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                  .isOk());
 
   EXPECT_TRUE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -187,7 +229,7 @@
 
   ASSERT_TRUE(mCameraService
                   ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
-                                   &aidlRet)
+                                   kDefaultDeviceId, &aidlRet)
                   .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), SizeIs(1));
@@ -198,7 +240,8 @@
 
   ASSERT_FALSE(mCameraService
                    ->registerCamera(mNdkOwnerToken,
-                                    kEmptyVirtualCameraConfiguration, &aidlRet)
+                                    kEmptyVirtualCameraConfiguration,
+                                    kDefaultDeviceId, &aidlRet)
                    .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
@@ -208,10 +251,12 @@
   bool aidlRet;
 
   VirtualCameraConfiguration config =
-      createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN);
+      createConfiguration(kVgaWidth, kVgaHeight, Format::UNKNOWN, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -219,21 +264,12 @@
 TEST_F(VirtualCameraServiceTest, ConfigurationWithTooHighResFails) {
   bool aidlRet;
   VirtualCameraConfiguration config =
-      createConfiguration(1000000, 1000000, Format::YUV_420_888);
+      createConfiguration(1000000, 1000000, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
-  EXPECT_FALSE(aidlRet);
-  EXPECT_THAT(getCameraIds(), IsEmpty());
-}
-
-TEST_F(VirtualCameraServiceTest, ConfigurationWithUnalignedResolutionFails) {
-  bool aidlRet;
-  VirtualCameraConfiguration config =
-      createConfiguration(641, 481, Format::YUV_420_888);
-
-  ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -241,10 +277,38 @@
 TEST_F(VirtualCameraServiceTest, ConfigurationWithNegativeResolutionFails) {
   bool aidlRet;
   VirtualCameraConfiguration config =
-      createConfiguration(-1, kVgaHeight, Format::YUV_420_888);
+      createConfiguration(-1, kVgaHeight, Format::YUV_420_888, kMaxFps);
 
   ASSERT_FALSE(
-      mCameraService->registerCamera(mNdkOwnerToken, config, &aidlRet).isOk());
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, ConfigurationWithTooLowMaxFpsFails) {
+  bool aidlRet;
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 0);
+
+  ASSERT_FALSE(
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, ConfigurationWithTooHighMaxFpsFails) {
+  bool aidlRet;
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::YUV_420_888, 90);
+
+  ASSERT_FALSE(
+      mCameraService
+          ->registerCamera(mNdkOwnerToken, config, kDefaultDeviceId, &aidlRet)
+          .isOk());
   EXPECT_FALSE(aidlRet);
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
@@ -278,7 +342,7 @@
 
   EXPECT_THAT(mCameraService
                   ->registerCamera(mNdkOwnerToken, mVgaYUV420OnlyConfiguration,
-                                   &aidlRet)
+                                   kDefaultDeviceId, &aidlRet)
                   .getExceptionCode(),
               Eq(EX_SECURITY));
 }
@@ -337,17 +401,61 @@
 }
 
 TEST_F(VirtualCameraServiceTest, TestCameraShellCmd) {
-  execute_shell_command("enable_test_camera");
+  EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
 
   std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
   EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
 
-  execute_shell_command("disable_test_camera");
+  EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
 
   std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
   EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id=12345"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
+  EXPECT_THAT(cameraIdsAfterEnable, ElementsAre("device@1.1/virtual/12345"));
+
+  EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
+  EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidId) {
+  EXPECT_THAT(
+      execute_shell_command("enable_test_camera --camera_id=NotNumericalId"),
+      Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithUnknownCommand) {
+  EXPECT_THAT(execute_shell_command("brew_coffee --flavor=vanilla"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithMalformedOption) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera **camera_id=12345"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithLensFacing) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=front"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraLensFacing(cameraIds[0]),
+              Optional(Eq(ANDROID_LENS_FACING_FRONT)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidLensFacing) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=west"),
+              Eq(STATUS_BAD_VALUE));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 30bd2b6..671e031 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
+ * Copyright 2023 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
 #include "VirtualCameraSession.h"
 #include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
 #include "aidl/android/companion/virtualcamera/SupportedStreamConfiguration.h"
+#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/BnCameraDeviceCallback.h"
 #include "aidl/android/hardware/camera/device/StreamConfiguration.h"
@@ -29,21 +30,31 @@
 #include "android/binder_interface_utils.h"
 #include "gmock/gmock.h"
 #include "gtest/gtest.h"
-#include "util/MetadataBuilder.h"
+#include "util/MetadataUtil.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 namespace {
 
-constexpr int kWidth = 640;
-constexpr int kHeight = 480;
+constexpr int kQvgaWidth = 320;
+constexpr int kQvgaHeight = 240;
+constexpr int kVgaWidth = 640;
+constexpr int kVgaHeight = 480;
+constexpr int kSvgaWidth = 800;
+constexpr int kSvgaHeight = 600;
+constexpr int kMaxFps = 30;
 constexpr int kStreamId = 0;
+constexpr int kSecondStreamId = 1;
 constexpr int kCameraId = 42;
+constexpr int kDefaultDeviceId = 0;
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::companion::virtualcamera::LensFacing;
+using ::aidl::android::companion::virtualcamera::SensorOrientation;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
+using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
 using ::aidl::android::hardware::camera::device::BnCameraDeviceCallback;
 using ::aidl::android::hardware::camera::device::BufferRequest;
@@ -96,23 +107,13 @@
   MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int), (override));
 };
 
-class VirtualCameraSessionTest : public ::testing::Test {
+class VirtualCameraSessionTestBase : public ::testing::Test {
  public:
-  void SetUp() override {
+  virtual void SetUp() override {
     mMockCameraDeviceCallback =
         ndk::SharedRefBase::make<MockCameraDeviceCallback>();
     mMockVirtualCameraClientCallback =
         ndk::SharedRefBase::make<MockVirtualCameraCallback>();
-    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
-        kCameraId,
-        std::vector<SupportedStreamConfiguration>{
-            SupportedStreamConfiguration{.width = kWidth,
-                                         .height = kHeight,
-                                         .pixelFormat = Format::YUV_420_888}},
-        mMockVirtualCameraClientCallback);
-    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
-        mVirtualCameraDevice, mMockCameraDeviceCallback,
-        mMockVirtualCameraClientCallback);
 
     // Explicitly defining default actions below to prevent gmock from
     // default-constructing ndk::ScopedAStatus, because default-constructed
@@ -138,6 +139,36 @@
  protected:
   std::shared_ptr<MockCameraDeviceCallback> mMockCameraDeviceCallback;
   std::shared_ptr<MockVirtualCameraCallback> mMockVirtualCameraClientCallback;
+};
+
+class VirtualCameraSessionTest : public VirtualCameraSessionTestBase {
+ public:
+  void SetUp() override {
+    VirtualCameraSessionTestBase::SetUp();
+
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = {SupportedStreamConfiguration{
+                                           .width = kVgaWidth,
+                                           .height = kVgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps},
+                                       SupportedStreamConfiguration{
+                                           .width = kSvgaWidth,
+                                           .height = kSvgaHeight,
+                                           .pixelFormat = Format::YUV_420_888,
+                                           .maxFps = kMaxFps}},
+            .virtualCameraCallback = mMockVirtualCameraClientCallback,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+    mVirtualCameraSession = ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
   std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
   std::shared_ptr<VirtualCameraSession> mVirtualCameraSession;
 };
@@ -146,18 +177,22 @@
   PixelFormat format = PixelFormat::YCBCR_420_888;
   StreamConfiguration streamConfiguration;
   streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, format)};
+      createStream(kStreamId, kVgaWidth, kVgaHeight, format),
+      createStream(kSecondStreamId, kSvgaWidth, kSvgaHeight, format)};
   std::vector<HalStream> halStreams;
-  EXPECT_CALL(
-      *mMockVirtualCameraClientCallback,
-      onStreamConfigured(kStreamId, _, kWidth, kHeight, Format::YUV_420_888));
+
+  // Expect highest resolution to be picked for the client input.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
 
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
 
   EXPECT_THAT(halStreams, SizeIs(streamConfiguration.streams.size()));
-  EXPECT_THAT(mVirtualCameraSession->getStreamIds(), ElementsAre(0));
+  EXPECT_THAT(mVirtualCameraSession->getStreamIds(),
+              ElementsAre(kStreamId, kSecondStreamId));
 }
 
 TEST_F(VirtualCameraSessionTest, SecondConfigureDropsUnreferencedStreams) {
@@ -165,18 +200,18 @@
   StreamConfiguration streamConfiguration;
   std::vector<HalStream> halStreams;
 
-  streamConfiguration.streams = {createStream(0, kWidth, kHeight, format),
-                                 createStream(1, kWidth, kHeight, format),
-                                 createStream(2, kWidth, kHeight, format)};
+  streamConfiguration.streams = {createStream(0, kVgaWidth, kVgaHeight, format),
+                                 createStream(1, kVgaWidth, kVgaHeight, format),
+                                 createStream(2, kVgaWidth, kVgaHeight, format)};
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
 
   EXPECT_THAT(mVirtualCameraSession->getStreamIds(), ElementsAre(0, 1, 2));
 
-  streamConfiguration.streams = {createStream(0, kWidth, kHeight, format),
-                                 createStream(2, kWidth, kHeight, format),
-                                 createStream(3, kWidth, kHeight, format)};
+  streamConfiguration.streams = {createStream(0, kVgaWidth, kVgaHeight, format),
+                                 createStream(2, kVgaWidth, kVgaHeight, format),
+                                 createStream(3, kVgaWidth, kVgaHeight, format)};
   ASSERT_TRUE(
       mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
           .isOk());
@@ -201,8 +236,8 @@
 
 TEST_F(VirtualCameraSessionTest, onProcessCaptureRequestTriggersClientCallback) {
   StreamConfiguration streamConfiguration;
-  streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, PixelFormat::YCBCR_420_888)};
+  streamConfiguration.streams = {createStream(kStreamId, kVgaWidth, kVgaHeight,
+                                              PixelFormat::YCBCR_420_888)};
   std::vector<CaptureRequest> requests(1);
   requests[0].frameNumber = 42;
   requests[0].settings = *(
@@ -226,8 +261,8 @@
 
 TEST_F(VirtualCameraSessionTest, configureAfterCameraRelease) {
   StreamConfiguration streamConfiguration;
-  streamConfiguration.streams = {
-      createStream(kStreamId, kWidth, kHeight, PixelFormat::YCBCR_420_888)};
+  streamConfiguration.streams = {createStream(kStreamId, kVgaWidth, kVgaHeight,
+                                              PixelFormat::YCBCR_420_888)};
   std::vector<HalStream> halStreams;
 
   // Release virtual camera.
@@ -240,6 +275,110 @@
       Eq(static_cast<int32_t>(Status::CAMERA_DISCONNECTED)));
 }
 
+TEST_F(VirtualCameraSessionTest, ConfigureWithEmptyStreams) {
+  StreamConfiguration streamConfiguration;
+  std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return the ILLEGAL_ARGUMENT service-specific code.
+  EXPECT_THAT(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .getServiceSpecificError(),
+      Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(VirtualCameraSessionTest, ConfigureWithDifferentAspectRatioFails) {
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {
+      createStream(kStreamId, kVgaWidth, kVgaHeight, PixelFormat::YCBCR_420_888),
+      createStream(kSecondStreamId, kVgaHeight, kVgaWidth,
+                   PixelFormat::YCBCR_420_888)};
+
+  std::vector<HalStream> halStreams;
+
+  // Expect the configuration attempt to return the ILLEGAL_ARGUMENT service-specific code.
+  EXPECT_THAT(
+      mVirtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .getServiceSpecificError(),
+      Eq(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT)));
+}
+
+class VirtualCameraSessionInputChoiceTest : public VirtualCameraSessionTestBase {
+ public:
+  std::shared_ptr<VirtualCameraSession> createSession(
+      const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
+    mVirtualCameraDevice = ndk::SharedRefBase::make<VirtualCameraDevice>(
+        kCameraId,
+        VirtualCameraConfiguration{
+            .supportedStreamConfigs = supportedInputConfigs,
+            .virtualCameraCallback = mMockVirtualCameraClientCallback,
+            .sensorOrientation = SensorOrientation::ORIENTATION_0,
+            .lensFacing = LensFacing::FRONT},
+        kDefaultDeviceId);
+    return ndk::SharedRefBase::make<VirtualCameraSession>(
+        mVirtualCameraDevice, mMockCameraDeviceCallback,
+        mMockVirtualCameraClientCallback);
+  }
+
+ protected:
+  std::shared_ptr<VirtualCameraDevice> mVirtualCameraDevice;
+};
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForDownsampledOutput) {
+  // Create camera configured to support SVGA YUV input and QVGA RGBA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure VGA stream. Expect SVGA input to be chosen to downscale from.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kVgaWidth, kVgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the client callback to be configured with the SVGA YUV input stream.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       configureChoosesCorrectInputStreamForMatchingResolution) {
+  // Create camera configured to support SVGA YUV input and QVGA RGBA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // Configure QVGA stream. Expect the matching QVGA input to be chosen.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect the client callback to be configured with the QVGA RGBA input stream.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+                                 Format::RGBA_8888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 510fd33..7554a67 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -68,12 +68,13 @@
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
+  uniform mat4 aTextureTransformMatrix; // Transform matrix given by surface texture.
   in vec4 aPosition;
   in vec2 aTextureCoord;
   out vec2 vTextureCoord;
   void main() {
     gl_Position = aPosition;
-    vTextureCoord = aTextureCoord;
+    vTextureCoord = (aTextureTransformMatrix * vec4(aTextureCoord, 0.0, 1.0)).xy;
   })";
 
 constexpr char kExternalYuvTextureFragmentShader[] = R"(#version 300 es
@@ -100,10 +101,12 @@
     })";
 
 constexpr int kCoordsPerVertex = 3;
-constexpr std::array<float, 12> kSquareCoords{-1.f, 1.0f, 0.0f,  // top left
-                                              -1.f, -1.f, 0.0f,  // bottom left
-                                              1.0f, -1.f, 0.0f,  // bottom right
-                                              1.0f, 1.0f, 0.0f};  // top right
+
+constexpr std::array<float, 12> kSquareCoords{
+    -1.f, -1.0f, 0.0f,   // top left
+    -1.f, 1.f,   0.0f,   // bottom left
+    1.0f, 1.f,   0.0f,   // bottom right
+    1.0f, -1.0f, 0.0f};  // top right
 
 constexpr std::array<float, 8> kTextureCoords{0.0f, 1.0f,   // top left
                                               0.0f, 0.0f,   // bottom left
@@ -265,32 +268,50 @@
   } else {
     ALOGE("External texture EGL shader program initialization failed.");
   }
+
+  // Lookup and cache handles to uniforms & attributes.
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+  mTransformMatrixHandle =
+      glGetUniformLocation(mProgram, "aTextureTransformMatrix");
+  mTextureHandle = glGetUniformLocation(mProgram, "uTexture");
+
+  // Pass vertex array to the shader.
+  glEnableVertexAttribArray(mPositionHandle);
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  // Pass texture coordinates corresponding to vertex array to the shader.
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTextureProgram::draw(GLuint textureId) {
+EglTextureProgram::~EglTextureProgram() {
+  if (mPositionHandle != -1) {
+    glDisableVertexAttribArray(mPositionHandle);
+  }
+  if (mTextureCoordHandle != -1) {
+    glDisableVertexAttribArray(mTextureCoordHandle);
+  }
+}
+
+bool EglTextureProgram::draw(GLuint textureId,
+                             const std::array<float, 16>& transformMatrix) {
   // Load compiled shader.
   glUseProgram(mProgram);
   if (checkEglError("glUseProgram")) {
     return false;
   }
 
-  // Pass vertex array to the shader.
-  int positionHandle = glGetAttribLocation(mProgram, "aPosition");
-  glEnableVertexAttribArray(positionHandle);
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
-
-  // Pass texture coordinates corresponding to vertex array to the shader.
-  int textureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
-  glEnableVertexAttribArray(textureCoordHandle);
-  glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, false,
-                        kTextureCoords.size(), kTextureCoords.data());
+  // Pass transformation matrix for the texture coordinates.
+  glUniformMatrix4fv(mTransformMatrixHandle, 1, /*transpose=*/GL_FALSE,
+                     transformMatrix.data());
 
   // Configure texture for the shader.
-  int textureHandle = glGetUniformLocation(mProgram, "uTexture");
   glActiveTexture(GL_TEXTURE0);
   glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
-  glUniform1i(textureHandle, 0);
+  glUniform1i(mTextureHandle, 0);
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index 1b5f2cd..c695cbb 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 
+#include <array>
+
 #include "GLES/gl.h"
 
 namespace android {
@@ -58,8 +60,23 @@
   enum class TextureFormat { RGBA, YUV };
 
   EglTextureProgram(TextureFormat textureFormat = TextureFormat::YUV);
+  virtual ~EglTextureProgram();
 
-  bool draw(GLuint textureId);
+  // Draw texture over whole viewport, applying transformMatrix to texture
+  // coordinates.
+  //
+  // The transform matrix is a 4x4 matrix in column-major order. It is applied
+  // to the texture coordinates (s,t,0,1), with s and t in the [0,1] range,
+  // prior to sampling:
+  //
+  // textureCoord = (transformMatrix * vec4(s,t,0,1)).xy
+  bool draw(GLuint textureId, const std::array<float, 16>& transformMatrix);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mTransformMatrixHandle = -1;
+  int mTextureHandle = -1;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 5b479c0..9f26e19 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -68,6 +68,16 @@
   return mTextureId;
 }
 
+GLuint EglSurfaceTexture::getTextureId() const {
+  return mTextureId;
+}
+
+std::array<float, 16> EglSurfaceTexture::getTransformMatrix() {
+  std::array<float, 16> matrix;
+  mGlConsumer->getTransformMatrix(matrix.data());
+  return matrix;
+}
+
 uint32_t EglSurfaceTexture::getWidth() const {
   return mWidth;
 }
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index 14dc7d6..faad7c4 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -57,6 +57,17 @@
   // Returns EGL texture id of the texture.
   GLuint updateTexture();
 
+  // Returns EGL texture id of the underlying texture.
+  GLuint getTextureId() const;
+
+  // Returns the 4x4 transformation matrix in column-major order that should
+  // be applied to the EGL texture coordinates before sampling from the
+  // texture backed by an Android native buffer, so that the correct region
+  // of the underlying buffer is sampled.
+  //
+  // See SurfaceTexture.getTransformMatrix for more details.
+  std::array<float, 16> getTransformMatrix();
+
  private:
   sp<IGraphicBufferProducer> mBufferProducer;
   sp<IGraphicBufferConsumer> mBufferConsumer;
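getTransformMatrix() is intended to be used together with EglTextureProgram::draw(): the matrix reported by the underlying GLConsumer is forwarded to the shader so that the sampled region matches the buffer backing the texture. A minimal sketch of that flow, assuming an already-initialized EglTextureProgram named program and an EglSurfaceTexture named surfaceTexture (both names are illustrative and not part of the sources above):

    // Hypothetical render step; EGL context setup and error handling omitted.
    surfaceTexture.updateTexture();                      // latch the newest frame
    const GLuint textureId = surfaceTexture.getTextureId();
    const std::array<float, 16> transform =
        surfaceTexture.getTransformMatrix();             // column-major 4x4
    if (!program.draw(textureId, transform)) {
        ALOGE("Failed to draw the surface texture");
    }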
diff --git a/services/camera/virtualcamera/util/JpegUtil.cc b/services/camera/virtualcamera/util/JpegUtil.cc
index 2b19c13..b034584 100644
--- a/services/camera/virtualcamera/util/JpegUtil.cc
+++ b/services/camera/virtualcamera/util/JpegUtil.cc
@@ -14,19 +14,20 @@
  * limitations under the License.
  */
 // #define LOG_NDEBUG 0
+#include "system/graphics.h"
 #define LOG_TAG "JpegUtil"
-#include "JpegUtil.h"
-
 #include <cstddef>
 #include <cstdint>
-#include <memory>
+#include <optional>
 #include <vector>
 
+#include "JpegUtil.h"
 #include "android/hardware_buffer.h"
 #include "jpeglib.h"
 #include "log/log.h"
 #include "ui/GraphicBuffer.h"
 #include "ui/GraphicBufferMapper.h"
+#include "util/Util.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -34,11 +35,11 @@
 namespace virtualcamera {
 namespace {
 
-constexpr int kJpegQuality = 80;
+constexpr int k2DCTSIZE = 2 * DCTSIZE;
 
 class LibJpegContext {
  public:
-  LibJpegContext(int width, int height, const size_t outBufferSize,
+  LibJpegContext(int width, int height, int quality, const size_t outBufferSize,
                  void* outBuffer)
       : mWidth(width),
         mHeight(height),
@@ -76,7 +77,7 @@
     jpeg_set_defaults(&mCompressStruct);
 
     // Set quality and colorspace.
-    jpeg_set_quality(&mCompressStruct, kJpegQuality, 1);
+    jpeg_set_quality(&mCompressStruct, quality, 1);
     jpeg_set_colorspace(&mCompressStruct, JCS_YCbCr);
 
     // Configure RAW input mode - this lets libjpeg know we're providing raw,
@@ -94,11 +95,63 @@
     mCompressStruct.comp_info[2].v_samp_factor = 1;
   }
 
-  bool compress(const android_ycbcr& ycbr) {
+  LibJpegContext& setApp1Data(const uint8_t* app1Data, const size_t size) {
+    mApp1Data = app1Data;
+    mApp1DataSize = size;
+    return *this;
+  }
+
+  std::optional<size_t> compress(std::shared_ptr<AHardwareBuffer> inBuffer) {
+    GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(inBuffer.get());
+
+    if (gBuffer == nullptr) {
+      ALOGE("%s: Input graphic buffer is nullptr", __func__);
+      return std::nullopt;
+    }
+
+    if (gBuffer->getPixelFormat() != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+      // This should never happen since we're allocating the temporary buffer
+      // with YUV420 layout above.
+      ALOGE("%s: Cannot compress non-YUV buffer (pixelFormat %d)", __func__,
+            gBuffer->getPixelFormat());
+      return std::nullopt;
+    }
+
+    YCbCrLockGuard yCbCrLock(inBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN);
+    if (yCbCrLock.getStatus() != OK) {
+      ALOGE("%s: Failed to lock the input buffer: %s", __func__,
+            statusToString(yCbCrLock.getStatus()).c_str());
+      return std::nullopt;
+    }
+    const android_ycbcr& ycbr = *yCbCrLock;
+
+    const int inBufferWidth = gBuffer->getWidth();
+    const int inBufferHeight = gBuffer->getHeight();
+
+    if (inBufferWidth % k2DCTSIZE || (inBufferHeight % k2DCTSIZE)) {
+      ALOGE(
+          "%s: Compressing YUV420 buffer with size %dx%d not aligned with 2 * "
+          "DCTSIZE (%d) is not currently supported.",
+          __func__, inBufferWidth, inBufferHeight, DCTSIZE);
+      return std::nullopt;
+    }
+
+    if (inBufferWidth < mWidth || inBufferHeight < mHeight) {
+      ALOGE(
+          "%s: Input buffer has smaller size (%dx%d) than image to be "
+          "compressed (%dx%d)",
+          __func__, inBufferWidth, inBufferHeight, mWidth, mHeight);
+      return std::nullopt;
+    }
+
+    // Chroma planes have 1/2 resolution of the original image.
+    const int cHeight = inBufferHeight / 2;
+    const int cWidth = inBufferWidth / 2;
+
     // Prepare arrays of pointers to scanlines of each plane.
-    std::vector<JSAMPROW> yLines(mHeight);
-    std::vector<JSAMPROW> cbLines(mHeight / 2);
-    std::vector<JSAMPROW> crLines(mHeight / 2);
+    std::vector<JSAMPROW> yLines(inBufferHeight);
+    std::vector<JSAMPROW> cbLines(cHeight);
+    std::vector<JSAMPROW> crLines(cHeight);
 
     uint8_t* y = static_cast<uint8_t*>(ycbr.y);
     uint8_t* cb = static_cast<uint8_t*>(ycbr.cb);
@@ -107,42 +160,34 @@
     // Since UV samples might be interleaved (semiplanar) we need to copy
     // them to separate planes, since libjpeg doesn't directly
     // support processing semiplanar YUV.
-    const int c_samples = (mWidth / 2) * (mHeight / 2);
-    std::vector<uint8_t> cb_plane(c_samples);
-    std::vector<uint8_t> cr_plane(c_samples);
+    const int cSamples = cWidth * cHeight;
+    std::vector<uint8_t> cb_plane(cSamples);
+    std::vector<uint8_t> cr_plane(cSamples);
 
     // TODO(b/301023410) - Use libyuv or ARM SIMD for "unzipping" the data.
-    for (int i = 0; i < c_samples; ++i) {
-      cb_plane[i] = *cb;
-      cr_plane[i] = *cr;
-      cb += ycbr.chroma_step;
-      cr += ycbr.chroma_step;
+    int out_idx = 0;
+    for (int i = 0; i < cHeight; ++i) {
+      for (int j = 0; j < cWidth; ++j) {
+        cb_plane[out_idx] = cb[j * ycbr.chroma_step];
+        cr_plane[out_idx] = cr[j * ycbr.chroma_step];
+        out_idx++;
+      }
+      cb += ycbr.cstride;
+      cr += ycbr.cstride;
     }
 
     // Collect pointers to individual scanline of each plane.
-    for (int i = 0; i < mHeight; ++i) {
+    for (int i = 0; i < inBufferHeight; ++i) {
       yLines[i] = y + i * ycbr.ystride;
     }
-    for (int i = 0; i < (mHeight / 2); ++i) {
-      cbLines[i] = cb_plane.data() + i * (mWidth / 2);
-      crLines[i] = cr_plane.data() + i * (mWidth / 2);
+    for (int i = 0; i < cHeight; ++i) {
+      cbLines[i] = cb_plane.data() + i * cWidth;
+      crLines[i] = cr_plane.data() + i * cWidth;
     }
 
     return compress(yLines, cbLines, crLines);
   }
 
-  bool compressBlackImage() {
-    // We only really need to prepare one scanline for Y and one shared scanline
-    // for Cb & Cr.
-    std::vector<uint8_t> yLine(mWidth, 0);
-    std::vector<uint8_t> chromaLine(mWidth / 2, 0xff / 2);
-
-    std::vector<JSAMPROW> yLines(mHeight, yLine.data());
-    std::vector<JSAMPROW> cLines(mHeight / 2, chromaLine.data());
-
-    return compress(yLines, cLines, cLines);
-  }
-
  private:
   void setSuccess(const boolean success) {
     mSuccess = success;
@@ -165,11 +210,18 @@
   // Takes vector of pointers to Y / Cb / Cr scanlines as an input. Length of
   // each vector needs to correspond to height of corresponding plane.
   //
-  // Returns true if compression is successful, false otherwise.
-  bool compress(std::vector<JSAMPROW>& yLines, std::vector<JSAMPROW>& cbLines,
-                std::vector<JSAMPROW>& crLines) {
+  // Returns size of compressed image in bytes on success, empty optional otherwise.
+  std::optional<size_t> compress(std::vector<JSAMPROW>& yLines,
+                                 std::vector<JSAMPROW>& cbLines,
+                                 std::vector<JSAMPROW>& crLines) {
     jpeg_start_compress(&mCompressStruct, TRUE);
 
+    if (mApp1Data != nullptr && mApp1DataSize > 0) {
+      ALOGV("%s: Writing exif, size %zu B", __func__, mApp1DataSize);
+      jpeg_write_marker(&mCompressStruct, JPEG_APP0 + 1,
+                        static_cast<const JOCTET*>(mApp1Data), mApp1DataSize);
+    }
+
     while (mCompressStruct.next_scanline < mCompressStruct.image_height) {
       const uint32_t batchSize = DCTSIZE * 2;
       const uint32_t nl = mCompressStruct.next_scanline;
@@ -181,11 +233,11 @@
         ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
               __FUNCTION__, done, batchSize, mCompressStruct.next_scanline,
               mCompressStruct.image_height);
-        return false;
+        return std::nullopt;
       }
     }
     jpeg_finish_compress(&mCompressStruct);
-    return mSuccess;
+    return mEncodedSize;
   }
 
   // === libjpeg callbacks below ===
@@ -217,6 +269,10 @@
   jpeg_error_mgr mErrorMgr;
   jpeg_destination_mgr mDestinationMgr;
 
+  // APP1 data.
+  const uint8_t* mApp1Data = nullptr;
+  size_t mApp1DataSize = 0;
+
   // Dimensions of the input image.
   int mWidth;
   int mHeight;
@@ -233,17 +289,28 @@
   boolean mSuccess = true;
 };
 
-}  // namespace
-
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer).compress(ycbcr);
+int roundTo2DCTMultiple(const int n) {
+  const int mod = n % k2DCTSIZE;
+  return mod == 0 ? n : n + (k2DCTSIZE - mod);
 }
 
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer) {
-  return LibJpegContext(width, height, outBufferSize, outBuffer)
-      .compressBlackImage();
+}  // namespace
+
+std::optional<size_t> compressJpeg(const int width, const int height,
+                                   const int quality,
+                                   std::shared_ptr<AHardwareBuffer> inBuffer,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer) {
+  LibJpegContext context(width, height, quality, outBufferSize, outBuffer);
+  if (!app1ExifData.empty()) {
+    context.setApp1Data(app1ExifData.data(), app1ExifData.size());
+  }
+  return context.compress(inBuffer);
+}
+
+Resolution roundTo2DctSize(const Resolution resolution) {
+  return Resolution(roundTo2DCTMultiple(resolution.width),
+                    roundTo2DCTMultiple(resolution.height));
 }
 
 }  // namespace virtualcamera
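Because DCTSIZE is 8 in libjpeg, k2DCTSIZE is 16 and roundTo2DCTMultiple() rounds a dimension up to the next multiple of 16 (641 % 16 == 1, so 641 becomes 641 + 15 = 656, while 480 is already a multiple and is left unchanged). A small sketch of roundTo2DctSize() on that basis; the two-argument Resolution constructor and the width/height members are assumed from their use above:

    // Illustrative only: padding an odd width up to a 2*DCTSIZE multiple.
    Resolution requested(641, 480);
    Resolution padded = roundTo2DctSize(requested);
    // padded.width  == 656 (next multiple of 16 above 641)
    // padded.height == 480 (already divisible by 16)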
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index c44d0a8..184dd56 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -17,24 +17,36 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
-#include <memory>
+#include <optional>
 
 #include "android/hardware_buffer.h"
-#include "system/graphics.h"
+#include "util/Util.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
 // Jpeg-compress image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressJpeg(int width, int height, const android_ycbcr& ycbcr,
-                  size_t outBufferSize, void* outBuffer);
+// * width - width of the image, can be less than width of inBuffer.
+// * height - height of the image, can be less than height of inBuffer.
+// * quality - 0-100, higher number corresponds to higher quality.
+// * inBuffer - Input buffer, the dimensions of the buffer must be aligned to
+//   2*DCT_SIZE (16) to include the necessary padding in case the width and
+//   height of the image are not aligned with 2*DCT_SIZE.
+// * app1ExifData - vector containing data to be included in APP1
+//   segment. Can be empty.
+// * outBufferSize - capacity of the output buffer.
+// * outBuffer - output buffer to write compressed data into.
+// Returns size of compressed data if the compression was successful,
+// empty optional otherwise.
+std::optional<size_t> compressJpeg(int width, int height, int quality,
+                                   std::shared_ptr<AHardwareBuffer> inBuffer,
+                                   const std::vector<uint8_t>& app1ExifData,
+                                   size_t outBufferSize, void* outBuffer);
 
-// Jpeg-compress all-black image into the output buffer.
-// Returns true if the compression was successful, false otherwise.
-bool compressBlackJpeg(int width, int height, size_t outBufferSize,
-                       void* outBuffer);
+// Rounds the resolution up to the nearest resolution whose width and height
+// are divisible by 2*DCT_SIZE (16).
+Resolution roundTo2DctSize(Resolution resolution);
 
 }  // namespace virtualcamera
 }  // namespace companion
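A minimal call sketch for the new compressJpeg() signature, assuming a YUV420 AHardwareBuffer whose dimensions were padded with roundTo2DctSize() is already available as inBuffer, and that no EXIF data is attached (buffer allocation and error handling are outside this sketch):

    std::vector<uint8_t> jpegOut(1024 * 1024);  // output capacity is an assumption
    std::vector<uint8_t> exifData;              // empty, so no APP1 marker is written
    std::optional<size_t> jpegSize =
        compressJpeg(/*width=*/640, /*height=*/480, /*quality=*/80, inBuffer,
                     exifData, jpegOut.size(), jpegOut.data());
    if (!jpegSize.has_value()) {
        ALOGE("JPEG compression failed");
    }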
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.cc b/services/camera/virtualcamera/util/MetadataBuilder.cc
deleted file mode 100644
index 92a48b9..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.cc
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "MetadataBuilder"
-
-#include "MetadataBuilder.h"
-
-#include <algorithm>
-#include <cstdint>
-#include <iterator>
-#include <memory>
-#include <utility>
-#include <variant>
-#include <vector>
-
-#include "CameraMetadata.h"
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "log/log.h"
-#include "system/camera_metadata.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using ::android::hardware::camera::common::helper::CameraMetadata;
-
-template <typename To, typename From>
-std::vector<To> convertTo(const std::vector<From>& from) {
-  std::vector<To> to;
-  to.reserve(from.size());
-  std::transform(from.begin(), from.end(), std::back_inserter(to),
-                 [](const From& x) { return static_cast<To>(x); });
-  return to;
-}
-
-}  // namespace
-
-MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
-    camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
-  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
-      std::vector<uint8_t>({static_cast<uint8_t>(hwLevel)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
-  const uint8_t metadataVal = flashAvailable
-                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
-                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
-  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = std::vector<uint8_t>({metadataVal});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setLensFacing(
-    camera_metadata_enum_android_lens_facing lensFacing) {
-  mEntryMap[ANDROID_LENS_FACING] =
-      std::vector<uint8_t>({static_cast<uint8_t>(lensFacing)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
-  mEntryMap[ANDROID_SENSOR_ORIENTATION] =
-      std::vector<int32_t>({sensorOrientation});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorTimestamp(
-    std::chrono::nanoseconds timestamp) {
-  mEntryMap[ANDROID_SENSOR_TIMESTAMP] =
-      std::vector<int64_t>({timestamp.count()});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
-    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-        faceDetectModes) {
-  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
-      convertTo<uint8_t>(faceDetectModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
-    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-        availableModes) {
-  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
-      convertTo<uint8_t>(availableModes);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfMode(
-    const camera_metadata_enum_android_control_af_mode_t mode) {
-  mEntryMap[ANDROID_CONTROL_AF_MODE] =
-      std::vector<uint8_t>({static_cast<uint8_t>(mode)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRange(
-    const int32_t minFps, const int32_t maxFps) {
-  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] =
-      std::vector<int32_t>({minFps, maxFps});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
-                                                       int32_t maxAwbRegions,
-                                                       int32_t maxAfRegions) {
-  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
-      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeRegions(
-    const std::vector<ControlRegion>& aeRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * aeRegions.size());
-  for (const ControlRegion& region : aeRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAfRegions(
-    const std::vector<ControlRegion>& afRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * afRegions.size());
-  for (const ControlRegion& region : afRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAwbRegions(
-    const std::vector<ControlRegion>& awbRegions) {
-  std::vector<int32_t> regions;
-  regions.reserve(5 * awbRegions.size());
-  for (const ControlRegion& region : awbRegions) {
-    regions.push_back(region.x0);
-    regions.push_back(region.y0);
-    regions.push_back(region.x1);
-    regions.push_back(region.y1);
-    regions.push_back(region.weight);
-  }
-  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
-    const camera_metadata_enum_android_control_capture_intent_t intent) {
-  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] =
-      std::vector<uint8_t>({static_cast<uint8_t>(intent)});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
-  mEntryMap[ANDROID_JPEG_MAX_SIZE] = std::vector<int32_t>({size});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
-    const std::vector<StreamConfiguration>& streamConfigurations) {
-  std::vector<int32_t> metadataStreamConfigs;
-  std::vector<int64_t> metadataMinFrameDurations;
-  std::vector<int64_t> metadataStallDurations;
-  metadataStreamConfigs.reserve(streamConfigurations.size());
-  metadataMinFrameDurations.reserve(streamConfigurations.size());
-  metadataStallDurations.reserve(streamConfigurations.size());
-
-  for (const auto& config : streamConfigurations) {
-    metadataStreamConfigs.push_back(config.format);
-    metadataStreamConfigs.push_back(config.width);
-    metadataStreamConfigs.push_back(config.height);
-    metadataStreamConfigs.push_back(
-        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
-
-    metadataMinFrameDurations.push_back(config.format);
-    metadataMinFrameDurations.push_back(config.width);
-    metadataMinFrameDurations.push_back(config.height);
-    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
-
-    metadataStallDurations.push_back(config.format);
-    metadataStallDurations.push_back(config.width);
-    metadataStallDurations.push_back(config.height);
-    metadataStallDurations.push_back(config.minStallDuration.count());
-  }
-
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
-      metadataStreamConfigs;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
-      metadataMinFrameDurations;
-  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
-      metadataMinFrameDurations;
-
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
-  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
-      std::vector<float>(maxZoom);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
-                                                           const float max) {
-  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
-                                                           int x1, int y1) {
-  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
-      std::vector<int32_t>({x0, y0, x1, y1});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
-                                                                int32_t max) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
-      std::vector<int32_t>({min, max});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
-    const camera_metadata_rational step) {
-  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
-      std::vector<camera_metadata_rational>({step});
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
-    const std::vector<int32_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
-    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
-        capabilities) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
-      convertTo<uint8_t>(capabilities);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
-    const std::vector<camera_metadata_tag_t>& keys) {
-  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
-      convertTo<int32_t>(keys);
-  return *this;
-}
-
-MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
-  std::vector<camera_metadata_tag_t> availableKeys;
-  availableKeys.reserve(mEntryMap.size());
-  for (const auto& [key, _] : mEntryMap) {
-    if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
-      availableKeys.push_back(key);
-    }
-  }
-  setAvailableCharacteristicKeys(availableKeys);
-  return *this;
-}
-
-std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
-MetadataBuilder::build() const {
-  CameraMetadata metadataHelper;
-  for (const auto& entry : mEntryMap) {
-    status_t ret = std::visit(
-        [&](auto&& arg) {
-          return metadataHelper.update(entry.first, arg.data(), arg.size());
-        },
-        entry.second);
-    if (ret != NO_ERROR) {
-      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
-            get_camera_metadata_tag_name(entry.first),
-            ::android::statusToString(ret).c_str());
-      return nullptr;
-    }
-  }
-
-  const camera_metadata_t* metadata = metadataHelper.getAndLock();
-  if (metadata == nullptr) {
-    ALOGE(
-        "Failure when constructing metadata -> CameraMetadata helper returned "
-        "nullptr");
-    return nullptr;
-  }
-
-  auto aidlMetadata =
-      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
-  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
-  aidlMetadata->metadata.assign(data_ptr,
-                                data_ptr + get_camera_metadata_size(metadata));
-  metadataHelper.unlock(metadata);
-
-  return aidlMetadata;
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataBuilder.h b/services/camera/virtualcamera/util/MetadataBuilder.h
deleted file mode 100644
index d992d31..0000000
--- a/services/camera/virtualcamera/util/MetadataBuilder.h
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
-
-#include <chrono>
-#include <cstdint>
-#include <map>
-#include <memory>
-#include <variant>
-#include <vector>
-
-#include "aidl/android/hardware/camera/device/CameraMetadata.h"
-#include "system/camera_metadata.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Convenience builder for the
-// aidl::android::hardware::camera::device::CameraMetadata.
-//
-// Calling the same builder setter multiple will overwrite the value.
-// This class is not thread-safe.
-class MetadataBuilder {
- public:
-  struct StreamConfiguration {
-    int32_t width = 0;
-    int32_t height = 0;
-    int32_t format = 0;
-    // Minimal frame duration - corresponds to maximal FPS for given format.
-    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minFrameDuration{std::chrono::seconds(1) / 30};
-    // Minimal stall duration.
-    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-    std::chrono::nanoseconds minStallDuration{0};
-  };
-
-  struct ControlRegion {
-    int32_t x0 = 0;
-    int32_t y0 = 0;
-    int32_t x1 = 0;
-    int32_t y1 = 0;
-    int32_t weight = 0;
-  };
-
-  MetadataBuilder() = default;
-  ~MetadataBuilder() = default;
-
-  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
-  MetadataBuilder& setSupportedHardwareLevel(
-      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
-
-  // Whether this camera device has a flash unit
-  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
-  MetadataBuilder& setFlashAvailable(bool flashAvailable);
-
-  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
-  MetadataBuilder& setLensFacing(
-      camera_metadata_enum_android_lens_facing lensFacing);
-
-  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
-
-  // Time at start of exposure of first row of the image
-  // sensor active array, in nanoseconds.
-  //
-  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
-
-  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
-
-  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableFaceDetectModes(
-      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
-          faceDetectMode);
-
-  // Sets available stream configurations along with corresponding minimal frame
-  // durations (corresponding to max fps) and stall durations.
-  //
-  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
-  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableOutputStreamConfigurations(
-      const std::vector<StreamConfiguration>& streamConfigurations);
-
-  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
-
-  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfAvailableModes(
-      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
-          availableModes);
-
-  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfMode(
-      const camera_metadata_enum_android_control_af_mode_t mode);
-
-  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeAvailableFpsRange(int32_t min, int32_t max);
-
-  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlCaptureIntent(
-      camera_metadata_enum_android_control_capture_intent_t intent);
-
-  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
-                                        int32_t maxAwbRegions,
-                                        int32_t maxAfRegions);
-
-  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAeRegions(
-      const std::vector<ControlRegion>& aeRegions);
-
-  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAwbRegions(
-      const std::vector<ControlRegion>& awbRegions);
-
-  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlAfRegions(
-      const std::vector<ControlRegion>& afRegions);
-
-  // The size of the compressed JPEG image, in bytes.
-  //
-  // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
-  MetadataBuilder& setMaxJpegSize(int32_t size);
-
-  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
-
-  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
-  MetadataBuilder& setControlZoomRatioRange(float min, float max);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureRequest.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
-
-  // A list of all keys that the camera device has available to use with
-  // CaptureResult.
-  //
-  // See ANDROID_RESULT_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
-
-  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCapabilities(
-      const std::vector<
-          camera_metadata_enum_android_request_available_capabilities_t>&
-          capabilities);
-
-  // A list of all keys that the camera device has available to use.
-  //
-  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
-  MetadataBuilder& setAvailableCharacteristicKeys(
-      const std::vector<camera_metadata_tag_t>& keys);
-
-  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
-  // containing all previously set tags.
-  MetadataBuilder& setAvailableCharacteristicKeys();
-
-  // Build CameraMetadata instance.
-  //
-  // Returns nullptr in case something went wrong.
-  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
-  build() const;
-
- private:
-  // Maps metadata tags to vectors of values for the given tag.
-  std::map<camera_metadata_tag_t,
-           std::variant<std::vector<int64_t>, std::vector<int32_t>,
-                        std::vector<uint8_t>, std::vector<float>,
-                        std::vector<camera_metadata_rational_t>>>
-      mEntryMap;
-};
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATABUILDER_H
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
new file mode 100644
index 0000000..d8f6125
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -0,0 +1,952 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "MetadataUtil"
+
+#include "MetadataUtil.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <iterator>
+#include <memory>
+#include <optional>
+#include <string>
+#include <utility>
+#include <variant>
+#include <vector>
+
+#include "CameraMetadata.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "log/log.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+#include "utils/Errors.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+namespace {
+
+using ::android::hardware::camera::common::helper::CameraMetadata;
+
+template <typename To, typename From>
+std::vector<To> convertTo(const std::vector<From>& from) {
+  std::vector<To> to;
+  to.reserve(from.size());
+  std::transform(from.begin(), from.end(), std::back_inserter(to),
+                 [](const From& x) { return static_cast<To>(x); });
+  return to;
+}
+
+template <typename To, typename From>
+std::vector<To> asVectorOf(const From from) {
+  return std::vector<To>({static_cast<To>(from)});
+}
+
+}  // namespace
+
+MetadataBuilder& MetadataBuilder::setSupportedHardwareLevel(
+    const camera_metadata_enum_android_info_supported_hardware_level_t hwLevel) {
+  mEntryMap[ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL] =
+      asVectorOf<uint8_t>(hwLevel);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setDeviceId(int32_t deviceId) {
+  mEntryMap[ANDROID_INFO_DEVICE_ID] = std::vector<int32_t>({deviceId});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashAvailable(bool flashAvailable) {
+  const uint8_t metadataVal = flashAvailable
+                                  ? ANDROID_FLASH_INFO_AVAILABLE_TRUE
+                                  : ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_FLASH_INFO_AVAILABLE] = asVectorOf<uint8_t>(metadataVal);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashState(
+    const camera_metadata_enum_android_flash_state_t flashState) {
+  mEntryMap[ANDROID_FLASH_STATE] = asVectorOf<uint8_t>(flashState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFlashMode(
+    const camera_metadata_enum_android_flash_mode_t flashMode) {
+  mEntryMap[ANDROID_FLASH_MODE] = asVectorOf<uint8_t>(flashMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensFacing(
+    const camera_metadata_enum_android_lens_facing lensFacing) {
+  mEntryMap[ANDROID_LENS_FACING] = asVectorOf<uint8_t>(lensFacing);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorReadoutTimestamp(
+    const camera_metadata_enum_android_sensor_readout_timestamp_t
+        sensorReadoutTimestamp) {
+  mEntryMap[ANDROID_SENSOR_READOUT_TIMESTAMP] =
+      asVectorOf<uint8_t>(sensorReadoutTimestamp);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFocalLengths(
+    const std::vector<float>& focalLengths) {
+  mEntryMap[ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS] = focalLengths;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFocalLength(float focalLength) {
+  mEntryMap[ANDROID_LENS_FOCAL_LENGTH] = asVectorOf<float>(focalLength);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorOrientation(int32_t sensorOrientation) {
+  mEntryMap[ANDROID_SENSOR_ORIENTATION] = asVectorOf<int32_t>(sensorOrientation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestampSource(
+    const camera_metadata_enum_android_sensor_info_timestamp_source_t
+        timestampSource) {
+  mEntryMap[ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE] =
+      asVectorOf<uint8_t>(timestampSource);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorTimestamp(
+    std::chrono::nanoseconds timestamp) {
+  mEntryMap[ANDROID_SENSOR_TIMESTAMP] = asVectorOf<int64_t>(timestamp.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableFaceDetectModes(
+    const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+        faceDetectModes) {
+  mEntryMap[ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES] =
+      convertTo<uint8_t>(faceDetectModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableTestPatternModes(
+    const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+        testPatternModes) {
+  mEntryMap[ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES] =
+      convertTo<int32_t>(testPatternModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableStreamUseCases(
+    const std::vector<
+        camera_metadata_enum_android_scaler_available_stream_use_cases>&
+        availableUseCases) {
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES] =
+      convertTo<int64_t>(availableUseCases);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setFaceDetectMode(
+    const camera_metadata_enum_android_statistics_face_detect_mode_t
+        faceDetectMode) {
+  mEntryMap[ANDROID_STATISTICS_FACE_DETECT_MODE] =
+      asVectorOf<uint8_t>(faceDetectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMode(
+    const camera_metadata_enum_android_control_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableSceneModes(
+    const std::vector<camera_metadata_enum_android_control_scene_mode>&
+        availableSceneModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_SCENE_MODES] =
+      convertTo<uint8_t>(availableSceneModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlSceneMode(
+    const camera_metadata_enum_android_control_scene_mode sceneMode) {
+  mEntryMap[ANDROID_CONTROL_SCENE_MODE] = asVectorOf<uint8_t>(sceneMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableEffects(
+    const std::vector<camera_metadata_enum_android_control_effect_mode>&
+        availableEffects) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_EFFECTS] =
+      convertTo<uint8_t>(availableEffects);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlEffectMode(
+    const camera_metadata_enum_android_control_effect_mode_t effectMode) {
+  mEntryMap[ANDROID_CONTROL_EFFECT_MODE] = asVectorOf<uint8_t>(effectMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableVideoStabilizationModes(
+    const std::vector<
+        camera_metadata_enum_android_control_video_stabilization_mode_t>&
+        videoStabilizationModes) {
+  mEntryMap[ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES] =
+      convertTo<uint8_t>(videoStabilizationModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlVideoStabilizationMode(
+    const camera_metadata_enum_android_control_video_stabilization_mode
+        stabilizationMode) {
+  mEntryMap[ANDROID_CONTROL_VIDEO_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(stabilizationMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+        availableModes) {
+  mEntryMap[ANDROID_CONTROL_AF_AVAILABLE_MODES] =
+      convertTo<uint8_t>(availableModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfMode(
+    const camera_metadata_enum_android_control_af_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AF_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfState(
+    const camera_metadata_enum_android_control_af_state afState) {
+  mEntryMap[ANDROID_CONTROL_AF_STATE] = asVectorOf<uint8_t>(afState);
+  return *this;
+}
+
+// See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+MetadataBuilder& MetadataBuilder::setControlAfTrigger(
+    const camera_metadata_enum_android_control_af_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AF_TRIGGER] = asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableFpsRanges(
+    const std::vector<FpsRange>& fpsRanges) {
+  std::vector<int32_t> ranges;
+  ranges.reserve(2 * fpsRanges.size());
+  for (const FpsRange fpsRange : fpsRanges) {
+    ranges.push_back(fpsRange.minFps);
+    ranges.push_back(fpsRange.maxFps);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES] = std::move(ranges);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeTargetFpsRange(
+    const FpsRange fpsRange) {
+  mEntryMap[ANDROID_CONTROL_AE_TARGET_FPS_RANGE] =
+      std::vector<int32_t>({fpsRange.minFps, fpsRange.maxFps});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeMode(
+    const camera_metadata_enum_android_control_ae_mode_t mode) {
+  mEntryMap[ANDROID_CONTROL_AE_MODE] = asVectorOf<uint8_t>(mode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableModes(
+    const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_MODES] = convertTo<uint8_t>(modes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAePrecaptureTrigger(
+    const camera_metadata_enum_android_control_ae_precapture_trigger_t trigger) {
+  mEntryMap[ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER] =
+      asVectorOf<uint8_t>(trigger);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlMaxRegions(int32_t maxAeRegions,
+                                                       int32_t maxAwbRegions,
+                                                       int32_t maxAfRegions) {
+  mEntryMap[ANDROID_CONTROL_MAX_REGIONS] =
+      std::vector<int32_t>({maxAeRegions, maxAwbRegions, maxAfRegions});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAvailableAwbModes(
+    const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes) {
+  mEntryMap[ANDROID_CONTROL_AWB_AVAILABLE_MODES] = convertTo<uint8_t>(awbModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbMode(
+    const camera_metadata_enum_android_control_awb_mode awbMode) {
+  mEntryMap[ANDROID_CONTROL_AWB_MODE] = asVectorOf<uint8_t>(awbMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbState(
+    const camera_metadata_enum_android_control_awb_state awbState) {
+  mEntryMap[ANDROID_CONTROL_AWB_STATE] = asVectorOf<uint8_t>(awbState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLockAvailable(
+    const bool awbLockAvailable) {
+  const uint8_t lockAvailable = awbLockAvailable
+                                    ? ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK_AVAILABLE] =
+      std::vector<uint8_t>({lockAvailable});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbLock(
+    const camera_metadata_enum_android_control_awb_lock awbLock) {
+  mEntryMap[ANDROID_CONTROL_AWB_LOCK] = asVectorOf<uint8_t>(awbLock);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAvailableAntibandingModes(
+    const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+        antibandingModes) {
+  mEntryMap[ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES] =
+      convertTo<uint8_t>(antibandingModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeAntibandingMode(
+    const camera_metadata_enum_android_control_ae_antibanding_mode_t
+        antibandingMode) {
+  mEntryMap[ANDROID_CONTROL_AE_ANTIBANDING_MODE] =
+      asVectorOf<uint8_t>(antibandingMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLockAvailable(
+    const bool aeLockAvailable) {
+  const uint8_t lockAvailable = aeLockAvailable
+                                    ? ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE
+                                    : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+  mEntryMap[ANDROID_CONTROL_AE_LOCK_AVAILABLE] =
+      asVectorOf<uint8_t>(lockAvailable);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeLock(
+    const camera_metadata_enum_android_control_ae_lock aeLock) {
+  mEntryMap[ANDROID_CONTROL_AE_LOCK] = asVectorOf<uint8_t>(aeLock);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeRegions(
+    const std::vector<ControlRegion>& aeRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * aeRegions.size());
+  for (const ControlRegion& region : aeRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AE_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAfRegions(
+    const std::vector<ControlRegion>& afRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * afRegions.size());
+  for (const ControlRegion& region : afRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AF_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAwbRegions(
+    const std::vector<ControlRegion>& awbRegions) {
+  std::vector<int32_t> regions;
+  regions.reserve(5 * awbRegions.size());
+  for (const ControlRegion& region : awbRegions) {
+    regions.push_back(region.x0);
+    regions.push_back(region.y0);
+    regions.push_back(region.x1);
+    regions.push_back(region.y1);
+    regions.push_back(region.weight);
+  }
+  mEntryMap[ANDROID_CONTROL_AWB_REGIONS] = std::move(regions);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlCaptureIntent(
+    const camera_metadata_enum_android_control_capture_intent_t intent) {
+  mEntryMap[ANDROID_CONTROL_CAPTURE_INTENT] = asVectorOf<uint8_t>(intent);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCropRegion(const int32_t x, const int32_t y,
+                                                const int32_t width,
+                                                const int32_t height) {
+  mEntryMap[ANDROID_SCALER_CROP_REGION] =
+      std::vector<int32_t>({x, y, width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
+  mEntryMap[ANDROID_JPEG_MAX_SIZE] = asVectorOf<int32_t>(size);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFrameDuration(
+    const std::chrono::nanoseconds duration) {
+  mEntryMap[ANDROID_SENSOR_INFO_MAX_FRAME_DURATION] =
+      asVectorOf<int64_t>(duration.count());
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegAvailableThumbnailSizes(
+    const std::vector<Resolution>& thumbnailSizes) {
+  std::vector<int32_t> sizes;
+  sizes.reserve(thumbnailSizes.size() * 2);
+  for (const Resolution& resolution : thumbnailSizes) {
+    sizes.push_back(resolution.width);
+    sizes.push_back(resolution.height);
+  }
+  mEntryMap[ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES] = std::move(sizes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegGpsCoordinates(
+    const GpsCoordinates& gpsCoordinates) {
+  mEntryMap[ANDROID_JPEG_GPS_COORDINATES] =
+      std::vector<double>({gpsCoordinates.latitude, gpsCoordinates.longitude,
+                           gpsCoordinates.altitude});
+
+  if (!gpsCoordinates.provider.empty()) {
+    mEntryMap[ANDROID_JPEG_GPS_PROCESSING_METHOD] = std::vector<uint8_t>{
+        gpsCoordinates.provider.begin(), gpsCoordinates.provider.end()};
+  }
+
+  if (gpsCoordinates.timestamp.has_value()) {
+    mEntryMap[ANDROID_JPEG_GPS_TIMESTAMP] =
+        asVectorOf<int64_t>(gpsCoordinates.timestamp.value());
+  }
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegOrientation(const int32_t orientation) {
+  mEntryMap[ANDROID_JPEG_ORIENTATION] = asVectorOf<int32_t>(orientation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailSize(const int width,
+                                                       const int height) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_SIZE] = std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setJpegThumbnailQuality(const uint8_t quality) {
+  mEntryMap[ANDROID_JPEG_THUMBNAIL_QUALITY] = asVectorOf<uint8_t>(quality);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
+    const int32_t maxRawStreams, const int32_t maxProcessedStreams,
+    const int32_t maxStallStreams) {
+  mEntryMap[ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS] = std::vector<int32_t>(
+      {maxRawStreams, maxProcessedStreams, maxStallStreams});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
+    const camera_metadata_enum_android_sync_max_latency latency) {
+  mEntryMap[ANDROID_SYNC_MAX_LATENCY] = asVectorOf<int32_t>(latency);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineMaxDepth(const uint8_t maxDepth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_MAX_DEPTH] = asVectorOf<uint8_t>(maxDepth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setPipelineDepth(const uint8_t depth) {
+  mEntryMap[ANDROID_REQUEST_PIPELINE_DEPTH] = asVectorOf<uint8_t>(depth);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        requestCapabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(requestCapabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableOutputStreamConfigurations(
+    const std::vector<StreamConfiguration>& streamConfigurations) {
+  std::vector<int32_t> metadataStreamConfigs;
+  std::vector<int64_t> metadataMinFrameDurations;
+  std::vector<int64_t> metadataStallDurations;
+  metadataStreamConfigs.reserve(streamConfigurations.size());
+  metadataMinFrameDurations.reserve(streamConfigurations.size());
+  metadataStallDurations.reserve(streamConfigurations.size());
+
+  for (const auto& config : streamConfigurations) {
+    metadataStreamConfigs.push_back(config.format);
+    metadataStreamConfigs.push_back(config.width);
+    metadataStreamConfigs.push_back(config.height);
+    metadataStreamConfigs.push_back(
+        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+
+    metadataMinFrameDurations.push_back(config.format);
+    metadataMinFrameDurations.push_back(config.width);
+    metadataMinFrameDurations.push_back(config.height);
+    metadataMinFrameDurations.push_back(config.minFrameDuration.count());
+
+    metadataStallDurations.push_back(config.format);
+    metadataStallDurations.push_back(config.width);
+    metadataStallDurations.push_back(config.height);
+    metadataStallDurations.push_back(config.minStallDuration.count());
+  }
+
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS] =
+      std::move(metadataStreamConfigs);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS] =
+      std::move(metadataMinFrameDurations);
+  mEntryMap[ANDROID_SCALER_AVAILABLE_STALL_DURATIONS] =
+      std::move(metadataStallDurations);
+
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableAberrationCorrectionModes(
+    const std::vector<camera_metadata_enum_android_color_correction_aberration_mode>&
+        aberrationCorrectionModes) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES] =
+      convertTo<uint8_t>(aberrationCorrectionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAberrationCorrectionMode(
+    const camera_metadata_enum_android_color_correction_aberration_mode
+        aberrationCorrectionMode) {
+  mEntryMap[ANDROID_COLOR_CORRECTION_ABERRATION_MODE] =
+      asVectorOf<uint8_t>(aberrationCorrectionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableNoiseReductionModes(
+    const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+        noiseReductionModes) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES] =
+      convertTo<uint8_t>(noiseReductionModes);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setNoiseReductionMode(
+    const camera_metadata_enum_android_noise_reduction_mode noiseReductionMode) {
+  mEntryMap[ANDROID_NOISE_REDUCTION_MODE] =
+      asVectorOf<uint8_t>(noiseReductionMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setRequestPartialResultCount(
+    const int partialResultCount) {
+  mEntryMap[ANDROID_REQUEST_PARTIAL_RESULT_COUNT] =
+      asVectorOf<int32_t>(partialResultCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setCroppingType(
+    const camera_metadata_enum_android_scaler_cropping_type croppingType) {
+  mEntryMap[ANDROID_SCALER_CROPPING_TYPE] = asVectorOf<uint8_t>(croppingType);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setMaxFaceCount(const int maxFaceCount) {
+  mEntryMap[ANDROID_STATISTICS_INFO_MAX_FACE_COUNT] =
+      asVectorOf<int32_t>(maxFaceCount);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableMaxDigitalZoom(const float maxZoom) {
+  mEntryMap[ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] =
+      asVectorOf<float>(maxZoom);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlZoomRatioRange(const float min,
+                                                           const float max) {
+  mEntryMap[ANDROID_CONTROL_ZOOM_RATIO_RANGE] = std::vector<float>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorActiveArraySize(int x0, int y0,
+                                                           int x1, int y1) {
+  mEntryMap[ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE] =
+      std::vector<int32_t>({x0, y0, x1, y1});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPixelArraySize(int width,
+                                                          int height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE] =
+      std::vector<int32_t>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setSensorPhysicalSize(float width,
+                                                        float height) {
+  mEntryMap[ANDROID_SENSOR_INFO_PHYSICAL_SIZE] =
+      std::vector<float>({width, height});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationRange(int32_t min,
+                                                                int32_t max) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_RANGE] =
+      std::vector<int32_t>({min, max});
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeCompensationStep(
+    const camera_metadata_rational step) {
+  mEntryMap[ANDROID_CONTROL_AE_COMPENSATION_STEP] =
+      asVectorOf<camera_metadata_rational>(step);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeExposureCompensation(
+    const int32_t exposureCompensation) {
+  mEntryMap[ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION] =
+      asVectorOf<int32_t>(exposureCompensation);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setControlAeState(
+    const camera_metadata_enum_android_control_ae_state aeState) {
+  mEntryMap[ANDROID_CONTROL_AE_STATE] = asVectorOf<uint8_t>(aeState);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsSceneFlicker(
+    const camera_metadata_enum_android_statistics_scene_flicker sceneFlicker) {
+  mEntryMap[ANDROID_STATISTICS_SCENE_FLICKER] =
+      asVectorOf<uint8_t>(sceneFlicker);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsHotPixelMapMode(
+    const camera_metadata_enum_android_statistics_hot_pixel_map_mode
+        hotPixelMapMode) {
+  mEntryMap[ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE] =
+      asVectorOf<uint8_t>(hotPixelMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setStatisticsLensShadingMapMode(
+    const camera_metadata_enum_android_statistics_lens_shading_map_mode
+        lensShadingMapMode) {
+  mEntryMap[ANDROID_STATISTICS_LENS_SHADING_MAP_MODE] =
+      asVectorOf<uint8_t>(lensShadingMapMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setLensOpticalStabilizationMode(
+    const camera_metadata_enum_android_lens_optical_stabilization_mode_t
+        opticalStabilizationMode) {
+  mEntryMap[ANDROID_LENS_OPTICAL_STABILIZATION_MODE] =
+      asVectorOf<uint8_t>(opticalStabilizationMode);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableRequestKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableResultKeys(
+    const std::vector<int32_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_RESULT_KEYS] = keys;
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCapabilities(
+    const std::vector<camera_metadata_enum_android_request_available_capabilities_t>&
+        capabilities) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CAPABILITIES] =
+      convertTo<uint8_t>(capabilities);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys(
+    const std::vector<camera_metadata_tag_t>& keys) {
+  mEntryMap[ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS] =
+      convertTo<int32_t>(keys);
+  return *this;
+}
+
+MetadataBuilder& MetadataBuilder::setAvailableCharacteristicKeys() {
+  mExtendWithAvailableCharacteristicsKeys = true;
+  return *this;
+}
+
+std::unique_ptr<aidl::android::hardware::camera::device::CameraMetadata>
+MetadataBuilder::build() {
+  if (mExtendWithAvailableCharacteristicsKeys) {
+    std::vector<camera_metadata_tag_t> availableKeys;
+    availableKeys.reserve(mEntryMap.size());
+    for (const auto& [key, _] : mEntryMap) {
+      if (key != ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS) {
+        availableKeys.push_back(key);
+      }
+    }
+    setAvailableCharacteristicKeys(availableKeys);
+  }
+
+  CameraMetadata metadataHelper;
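+  // Each map entry stores one of the variant's vector alternatives; std::visit
+  // below dispatches to the CameraMetadata::update overload matching that
+  // element type.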
+  for (const auto& entry : mEntryMap) {
+    status_t ret = std::visit(
+        [&](auto&& arg) {
+          return metadataHelper.update(entry.first, arg.data(), arg.size());
+        },
+        entry.second);
+    if (ret != NO_ERROR) {
+      ALOGE("Failed to update metadata with key %d - %s: %s", entry.first,
+            get_camera_metadata_tag_name(entry.first),
+            ::android::statusToString(ret).c_str());
+      return nullptr;
+    }
+  }
+
+  const camera_metadata_t* metadata = metadataHelper.getAndLock();
+  if (metadata == nullptr) {
+    ALOGE(
+        "Failure when constructing metadata -> CameraMetadata helper returned "
+        "nullptr");
+    return nullptr;
+  }
+
+  auto aidlMetadata =
+      std::make_unique<aidl::android::hardware::camera::device::CameraMetadata>();
+  const uint8_t* data_ptr = reinterpret_cast<const uint8_t*>(metadata);
+  aidlMetadata->metadata.assign(data_ptr,
+                                data_ptr + get_camera_metadata_size(metadata));
+  metadataHelper.unlock(metadata);
+
+  return aidlMetadata;
+}
+
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_QUALITY, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+int32_t getJpegOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_ORIENTATION,
+                                    &entry) != OK) {
+    return 0;
+  }
+
+  return *entry.data.i32;
+}
+
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_SIZE,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return Resolution(entry.data.i32[0], entry.data.i32[1]);
+}
+
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_THUMBNAIL_QUALITY,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return *entry.data.i32;
+}
+
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, &entry) != OK) {
+    return {};
+  }
+
+  std::vector<Resolution> thumbnailSizes;
+  thumbnailSizes.reserve(entry.count / 2);
+  for (size_t i = 0; i < entry.count; i += 2) {
+    thumbnailSizes.emplace_back(entry.data.i32[i], entry.data.i32[i + 1]);
+  }
+  return thumbnailSizes;
+}
+
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry) != OK ||
+      entry.count != 2) {
+    return {};
+  }
+
+  FpsRange range{.minFps = entry.data.i32[0], .maxFps = entry.data.i32[1]};
+  return range;
+}
+
+std::optional<camera_metadata_enum_android_control_capture_intent>
+getCaptureIntent(const aidl::android::hardware::camera::device::CameraMetadata&
+                     cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_CAPTURE_INTENT,
+                                    &entry) != OK) {
+    return {};
+  }
+
+  return static_cast<camera_metadata_enum_android_control_capture_intent>(
+      entry.data.u8[0]);
+}
+
+std::optional<GpsCoordinates> getGpsCoordinates(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_COORDINATES,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  GpsCoordinates coordinates{.latitude = entry.data.d[0],
+                             .longitude = entry.data.d[1],
+                             .altitude = entry.data.d[2]};
+
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_JPEG_GPS_TIMESTAMP,
+                                    &entry) == OK) {
+    coordinates.timestamp = entry.data.i64[0];
+  }
+
+  // According to types.hal, the string describing the GPS processing method is
+  // at most 32 characters long.
+  static constexpr size_t kGpsProviderStringLength = 32;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry) == OK) {
+    coordinates.provider.assign(
+        reinterpret_cast<const char*>(entry.data.u8),
+        std::min(entry.count, static_cast<size_t>(kGpsProviderStringLength)));
+  }
+
+  return coordinates;
+}
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_LENS_FACING, &entry) !=
+      OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<camera_metadata_enum_android_lens_facing>(entry.data.u8[0]);
+}
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(
+          metadata, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<camera_metadata_enum_android_control_ae_precapture_trigger>(
+      entry.data.u8[0]);
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
new file mode 100644
index 0000000..d0af0c1
--- /dev/null
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -0,0 +1,491 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
+
+#include <chrono>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <variant>
+#include <vector>
+
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "system/camera_metadata.h"
+#include "util/Util.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Convenience builder for the
+// aidl::android::hardware::camera::device::CameraMetadata.
+//
+// Calling the same builder setter multiple times overwrites the previous value.
+// This class is not thread-safe.
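+//
+// A minimal usage sketch (illustrative only; the chosen setter values are
+// hypothetical, not defaults of this class):
+//
+//   auto metadata = MetadataBuilder()
+//                       .setSupportedHardwareLevel(
+//                           ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+//                       .setFlashAvailable(false)
+//                       .setSensorOrientation(0)
+//                       .build();  // Returns nullptr if construction fails.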
+class MetadataBuilder {
+ public:
+  struct StreamConfiguration {
+    int32_t width = 0;
+    int32_t height = 0;
+    int32_t format = 0;
+    // Minimal frame duration - corresponds to maximal FPS for given format.
+    // See ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minFrameDuration{0};
+    // Minimal stall duration.
+    // See ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+    std::chrono::nanoseconds minStallDuration{0};
+  };
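+
+  // As an illustration of the minFrameDuration field above: capping a stream
+  // at 30 fps corresponds to a minimum frame duration of roughly
+  // 1'000'000'000 ns / 30 ≈ 33'333'333 ns (example arithmetic only).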
+
+  struct ControlRegion {
+    int32_t x0 = 0;
+    int32_t y0 = 0;
+    int32_t x1 = 0;
+    int32_t y1 = 0;
+    int32_t weight = 0;
+  };
+
+  MetadataBuilder() = default;
+  ~MetadataBuilder() = default;
+
+  // See ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL in CameraMetadataTag.aidl.
+  MetadataBuilder& setSupportedHardwareLevel(
+      camera_metadata_enum_android_info_supported_hardware_level_t hwLevel);
+
+  // See ANDROID_INFO_DEVICE_ID in CameraMetadataTag.aidl.
+  MetadataBuilder& setDeviceId(int32_t deviceId);
+
+  // Whether this camera device has a flash unit
+  // See ANDROID_FLASH_INFO_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setFlashAvailable(bool flashAvailable);
+
+  // See FLASH_STATE in CaptureResult.java.
+  MetadataBuilder& setFlashState(
+      camera_metadata_enum_android_flash_state_t flashState);
+
+  // See FLASH_MODE in CaptureRequest.java.
+  MetadataBuilder& setFlashMode(
+      camera_metadata_enum_android_flash_mode_t flashMode);
+
+  // See ANDROID_LENS_FACING in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensFacing(
+      camera_metadata_enum_android_lens_facing lensFacing);
+
+  // See ANDROID_SENSOR_READOUT_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorReadoutTimestamp(
+      camera_metadata_enum_android_sensor_readout_timestamp_t
+          sensorReadoutTimestamp);
+
+  // See ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFocalLengths(
+      const std::vector<float>& focalLengths);
+
+  // See ANDROID_LENS_FOCAL_LENGTH in CameraMetadataTag.aidl.
+  MetadataBuilder& setFocalLength(float focalLength);
+
+  // See ANDROID_SENSOR_ORIENTATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorOrientation(int32_t sensorOrientation);
+
+  // Time at start of exposure of first row of the image
+  // sensor active array, in nanoseconds.
+  //
+  // See ANDROID_SENSOR_TIMESTAMP in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorTimestamp(std::chrono::nanoseconds timestamp);
+
+  // See SENSOR_INFO_TIMESTAMP_SOURCE in CameraCharacteristics.java.
+  MetadataBuilder& setSensorTimestampSource(
+      camera_metadata_enum_android_sensor_info_timestamp_source_t timestampSource);
+
+  // See ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorActiveArraySize(int x0, int y0, int x1, int y1);
+
+  // See ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPixelArraySize(int width, int height);
+
+  // See ANDROID_SENSOR_INFO_PHYSICAL_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setSensorPhysicalSize(float width, float height);
+
+  // See ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES in
+  // CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableFaceDetectModes(
+      const std::vector<camera_metadata_enum_android_statistics_face_detect_mode_t>&
+          faceDetectModes);
+
+  // See SENSOR_AVAILABLE_TEST_PATTERN_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableTestPatternModes(
+      const std::vector<camera_metadata_enum_android_sensor_test_pattern_mode>&
+          testPatternModes);
+
+  // See ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES in CameraCharacteristics.java
+  MetadataBuilder& setAvailableStreamUseCases(
+      const std::vector<
+          camera_metadata_enum_android_scaler_available_stream_use_cases>&
+          availableUseCases);
+
+  // See ANDROID_STATISTICS_FACE_DETECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setFaceDetectMode(
+      camera_metadata_enum_android_statistics_face_detect_mode_t faceDetectMode);
+
+  // Sets available stream configurations along with corresponding minimal frame
+  // durations (corresponding to max fps) and stall durations.
+  //
+  // See ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+  // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and
+  // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableOutputStreamConfigurations(
+      const std::vector<StreamConfiguration>& streamConfigurations);
+
+  // See COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableAberrationCorrectionModes(
+      const std::vector<
+          camera_metadata_enum_android_color_correction_aberration_mode>&
+          aberrationCorrectionModes);
+
+  // See COLOR_CORRECTION_ABERRATION_MODE in CaptureRequest.java.
+  MetadataBuilder& setAberrationCorrectionMode(
+      camera_metadata_enum_android_color_correction_aberration_mode
+          aberrationCorrectionMode);
+
+  // See NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setAvailableNoiseReductionModes(
+      const std::vector<camera_metadata_enum_android_noise_reduction_mode>&
+          noiseReductionModes);
+
+  // See NOISE_REDUCTION_MODE in CaptureRequest.java.
+  MetadataBuilder& setNoiseReductionMode(
+      camera_metadata_enum_android_noise_reduction_mode noiseReductionMode);
+
+  // See REQUEST_PARTIAL_RESULT_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setRequestPartialResultCount(int partialResultCount);
+
+  // See SCALER_CROPPING_TYPE in CameraCharacteristics.java.
+  MetadataBuilder& setCroppingType(
+      camera_metadata_enum_android_scaler_cropping_type croppingType);
+
+  // See STATISTICS_INFO_MAX_FACE_COUNT in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFaceCount(int maxFaceCount);
+
+  // See ANDROID_CONTROL_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlMode(
+      camera_metadata_enum_android_control_mode_t mode);
+
+  // See ANDROID_CONTROL_AVAILABLE_SCENE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableSceneModes(
+      const std::vector<camera_metadata_enum_android_control_scene_mode>&
+          availableSceneModes);
+
+  // See ANDROID_CONTROL_SCENE_MODE in CameraMetadataTag.aidl
+  MetadataBuilder& setControlSceneMode(
+      camera_metadata_enum_android_control_scene_mode sceneMode);
+
+  // See ANDROID_CONTROL_AVAILABLE_EFFECTS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableEffects(
+      const std::vector<camera_metadata_enum_android_control_effect_mode>&
+          availableEffects);
+
+  // See CONTROL_EFFECT_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlEffectMode(
+      camera_metadata_enum_android_control_effect_mode_t effectMode);
+
+  // See ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableVideoStabilizationModes(
+      const std::vector<
+          camera_metadata_enum_android_control_video_stabilization_mode_t>&
+          videoStabilizationModes);
+
+  // See ANDROID_CONTROL_VIDEO_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlVideoStabilizationMode(
+      camera_metadata_enum_android_control_video_stabilization_mode
+          stabilizationMode);
+
+  // See CONTROL_AE_AVAILABLE_ANTIBANDING_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableAntibandingModes(
+      const std::vector<camera_metadata_enum_android_control_ae_antibanding_mode_t>&
+          antibandingModes);
+
+  // See CONTROL_AE_ANTIBANDING_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeAntibandingMode(
+      camera_metadata_enum_android_control_ae_antibanding_mode_t antibandingMode);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationRange(int32_t min, int32_t max);
+
+  // See ANDROID_CONTROL_AE_COMPENSATION_STEP in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeCompensationStep(camera_metadata_rational step);
+
+  // See ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeExposureCompensation(int32_t exposureCompensation);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_MODES in CameraCharacteristics.java.
+  MetadataBuilder& setControlAeAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_ae_mode_t>& modes);
+
+  // See ANDROID_CONTROL_AE_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAeMode(
+      camera_metadata_enum_android_control_ae_mode_t mode);
+
+  // See ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER in CaptureRequest.java.
+  MetadataBuilder& setControlAePrecaptureTrigger(
+      camera_metadata_enum_android_control_ae_precapture_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AF_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfAvailableModes(
+      const std::vector<camera_metadata_enum_android_control_af_mode_t>&
+          availableModes);
+
+  // See ANDROID_CONTROL_AF_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfMode(
+      const camera_metadata_enum_android_control_af_mode_t mode);
+
+  // See ANDROID_CONTROL_AF_TRIGGER in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfTrigger(
+      const camera_metadata_enum_android_control_af_trigger_t trigger);
+
+  // See ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeAvailableFpsRanges(
+      const std::vector<FpsRange>& fpsRanges);
+
+  // See ANDROID_CONTROL_AE_TARGET_FPS_RANGE in CaptureRequest.java.
+  MetadataBuilder& setControlAeTargetFpsRange(FpsRange fpsRange);
+
+  // See ANDROID_CONTROL_CAPTURE_INTENT in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlCaptureIntent(
+      camera_metadata_enum_android_control_capture_intent_t intent);
+
+  // See ANDROID_CONTROL_MAX_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlMaxRegions(int32_t maxAeRegions,
+                                        int32_t maxAwbRegions,
+                                        int32_t maxAfRegions);
+
+  // See ANDROID_CONTROL_AWB_AVAILABLE_MODES in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAvailableAwbModes(
+      const std::vector<camera_metadata_enum_android_control_awb_mode>& awbModes);
+
+  // See ANDROID_CONTROL_AWB_MODE in CaptureRequest.java.
+  MetadataBuilder& setControlAwbMode(
+      camera_metadata_enum_android_control_awb_mode awbMode);
+
+  // See CONTROL_AWB_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbLockAvailable(bool awbLockAvailable);
+
+  // See CONTROL_AWB_LOCK in CameraMetadataTag.aidl
+  MetadataBuilder& setControlAwbLock(
+      camera_metadata_enum_android_control_awb_lock awbLock);
+
+  // See CONTROL_AE_LOCK_AVAILABLE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLockAvailable(bool aeLockAvailable);
+
+  // See CONTROL_AE_LOCK in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeLock(
+      camera_metadata_enum_android_control_ae_lock aeLock);
+
+  // See CONTROL_AE_STATE in CameraMetadataTag.aidl
+  MetadataBuilder& setControlAeState(
+      camera_metadata_enum_android_control_ae_state aeState);
+
+  // See ANDROID_CONTROL_AE_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAeRegions(
+      const std::vector<ControlRegion>& aeRegions);
+
+  // See ANDROID_CONTROL_AWB_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbRegions(
+      const std::vector<ControlRegion>& awbRegions);
+
+  // See ANDROID_CONTROL_AWB_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAwbState(
+      camera_metadata_enum_android_control_awb_state awbState);
+
+  // See ANDROID_SCALER_CROP_REGION in CaptureRequest.java.
+  MetadataBuilder& setCropRegion(int32_t x, int32_t y, int32_t width,
+                                 int32_t height);
+
+  // See ANDROID_CONTROL_AF_REGIONS in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfRegions(
+      const std::vector<ControlRegion>& afRegions);
+
+  // See ANDROID_CONTROL_AF_STATE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlAfState(
+      camera_metadata_enum_android_control_af_state afState);
+
+  // The maximum size of the compressed JPEG buffer, in bytes.
+  //
+  // See ANDROID_JPEG_MAX_SIZE in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxJpegSize(int32_t size);
+
+  // See SENSOR_INFO_MAX_FRAME_DURATION in CameraCharacteristics.java.
+  MetadataBuilder& setMaxFrameDuration(std::chrono::nanoseconds duration);
+
+  // See JPEG_AVAILABLE_THUMBNAIL_SIZES in CameraCharacteristics.java.
+  MetadataBuilder& setJpegAvailableThumbnailSizes(
+      const std::vector<Resolution>& thumbnailSizes);
+
+  // See ANDROID_JPEG_GPS_COORDINATES.
+  MetadataBuilder& setJpegGpsCoordinates(const GpsCoordinates& gpsCoordinates);
+
+  // See JPEG_ORIENTATION in CaptureRequest.java.
+  MetadataBuilder& setJpegOrientation(int32_t orientation);
+
+  // See JPEG_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegQuality(uint8_t quality);
+
+  // See JPEG_THUMBNAIL_SIZE in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailSize(int width, int height);
+
+  // See JPEG_THUMBNAIL_QUALITY in CaptureRequest.java.
+  MetadataBuilder& setJpegThumbnailQuality(uint8_t quality);
+
+  // The maximum numbers of different types of output streams
+  // that can be configured and used simultaneously by a camera device.
+  //
+  // See ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS in CameraMetadataTag.aidl.
+  MetadataBuilder& setMaxNumberOutputStreams(int32_t maxRawStreams,
+                                             int32_t maxProcessedStreams,
+                                             int32_t maxStallStreams);
+
+  // See ANDROID_SYNC_MAX_LATENCY in CameraMetadataTag.aidl.
+  MetadataBuilder& setSyncMaxLatency(
+      camera_metadata_enum_android_sync_max_latency latency);
+
+  // See REQUEST_PIPELINE_MAX_DEPTH in CameraCharacteristics.java.
+  MetadataBuilder& setPipelineMaxDepth(uint8_t maxDepth);
+
+  // See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+  MetadataBuilder& setPipelineDepth(uint8_t depth);
+
+  // See ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableMaxDigitalZoom(const float maxZoom);
+
+  // See ANDROID_CONTROL_ZOOM_RATIO_RANGE in CameraMetadataTag.aidl.
+  MetadataBuilder& setControlZoomRatioRange(float min, float max);
+
+  // See ANDROID_STATISTICS_SCENE_FLICKER in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsSceneFlicker(
+      camera_metadata_enum_android_statistics_scene_flicker sceneFlicker);
+
+  // See ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsHotPixelMapMode(
+      camera_metadata_enum_android_statistics_hot_pixel_map_mode mode);
+
+  // See ANDROID_STATISTICS_LENS_SHADING_MAP_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setStatisticsLensShadingMapMode(
+      camera_metadata_enum_android_statistics_lens_shading_map_mode
+          lensShadingMapMode);
+
+  // See ANDROID_LENS_OPTICAL_STABILIZATION_MODE in CameraMetadataTag.aidl.
+  MetadataBuilder& setLensOpticalStabilizationMode(
+      camera_metadata_enum_android_lens_optical_stabilization_mode_t
+          opticalStabilizationMode);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          requestCapabilities);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureRequest.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableRequestKeys(const std::vector<int32_t>& keys);
+
+  // A list of all keys that the camera device has available to use with
+  // CaptureResult.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_RESULT_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableResultKeys(const std::vector<int32_t>& keys);
+
+  // See ANDROID_REQUEST_AVAILABLE_CAPABILITIES in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCapabilities(
+      const std::vector<
+          camera_metadata_enum_android_request_available_capabilities_t>&
+          capabilities);
+
+  // A list of all keys that the camera device has available to use.
+  //
+  // See ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS in CameraMetadataTag.aidl.
+  MetadataBuilder& setAvailableCharacteristicKeys(
+      const std::vector<camera_metadata_tag_t>& keys);
+
+  // Extends metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
+  // containing all set tags.
+  MetadataBuilder& setAvailableCharacteristicKeys();
+
+  // Build CameraMetadata instance.
+  //
+  // Returns nullptr if the metadata cannot be constructed.
+  std::unique_ptr<::aidl::android::hardware::camera::device::CameraMetadata>
+  build();
+
+ private:
+  // Maps metadata tags to vectors of values for the given tag.
+  std::map<
+      camera_metadata_tag_t,
+      std::variant<std::vector<int64_t>, std::vector<int32_t>,
+                   std::vector<uint8_t>, std::vector<float>,
+                   std::vector<camera_metadata_rational_t>, std::vector<double>>>
+      mEntryMap;
+  // Extend metadata with ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
+  bool mExtendWithAvailableCharacteristicsKeys = false;
+};
+
+// Returns JPEG_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_ORIENTATION from metadata, or 0 if the key is not present.
+int32_t getJpegOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_SIZE from metadata, or nullopt if the key is not present.
+std::optional<Resolution> getJpegThumbnailSize(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_THUMBNAIL_QUALITY from metadata, or nullopt if the key is not present.
+std::optional<int32_t> getJpegThumbnailQuality(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns JPEG_AVAILABLE_THUMBNAIL_SIZES from metadata, or an empty vector if
+// the key is not present.
+std::vector<Resolution> getJpegAvailableThumbnailSizes(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<FpsRange> getFpsRange(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_capture_intent> getCaptureIntent(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+// Returns ANDROID_JPEG_GPS_COORDINATES as a GpsCoordinates object, or nullopt
+// if the key is not present.
+std::optional<GpsCoordinates> getGpsCoordinates(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+    const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
+std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+getPrecaptureTrigger(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_METADATAUTIL_H
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
index a00a1b8..274996a 100644
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ b/services/camera/virtualcamera/util/TestPatternHelper.cc
@@ -15,6 +15,7 @@
  */
 
 // #define LOG_NDEBUG 0
+
 #define LOG_TAG "TestPatternHelper"
 
 #include "TestPatternHelper.h"
@@ -23,6 +24,9 @@
 #include <cstdint>
 
 #include "log/log.h"
+#include "nativebase/nativebase.h"
+#include "system/graphics.h"
+#include "ui/GraphicBuffer.h"
 #include "utils/Errors.h"
 
 namespace android {
@@ -31,6 +35,10 @@
 
 namespace {
 
+using namespace std::chrono_literals;
+
+static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
+
 uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
   std::complex<float> z = n;
   for (int i = 0; i < 64; i++) {
@@ -40,72 +48,103 @@
   return 0xff;
 }
 
-uint8_t pixelToFractal(const int x, const int y, const std::complex<float> c) {
-  std::complex<float> n(float(x) / 640.0f - 0.5, float(y) / 480.0f - 0.5);
+uint8_t pixelToFractal(const int x, const int y, const int width,
+                       const int height, const std::complex<float> c) {
+  std::complex<float> n(float(x) / float(width) - 0.5,
+                        float(y) / float(height) - 0.5);
   return julia(n * 5.f, c);
 }
 
-void renderTestPatternYcbCr420(uint8_t* data_ptr, const int width,
+void renderTestPatternYcbCr420(const android_ycbcr& ycbr, const int width,
                                const int height, const int frameNumber) {
   float time = float(frameNumber) / 120.0f;
   const std::complex<float> c(std::sin(time), std::cos(time));
 
-  uint8_t* y_data = data_ptr;
-  uint8_t* uv_data = static_cast<uint8_t*>(y_data + width * height);
+  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
+  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
+  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
 
-  for (int i = 0; i < width; ++i) {
-    for (int j = 0; j < height; ++j) {
-      y_data[j * width + i] = pixelToFractal(i, j, c * 0.78f);
-      if ((i & 1) && (j & 1)) {
-        uv_data[((j / 2) * (width / 2) + i / 2) * 2] =
-            static_cast<uint8_t>((float(i) / float(width)) * 255.f);
-        uv_data[((j / 2) * (width / 2) + i / 2) * 2 + 1] =
-            static_cast<uint8_t>((float(j) / float(height)) * 255.f);
-      }
+  for (int row = 0; row < height; row++) {
+    for (int col = 0; col < width; col++) {
+      y[row * ycbr.ystride + col] =
+          pixelToFractal(col, row, width, height, c * 0.78f);
+    }
+  }
+
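+  // Chroma planes are subsampled by 2 in both dimensions; android_ycbcr's
+  // chroma_step is 1 for planar and 2 for semi-planar (interleaved CbCr)
+  // layouts, so indexing with it handles both cases.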
+  int cWidth = width / 2;
+  int cHeight = height / 2;
+  for (int row = 0; row < cHeight; row++) {
+    for (int col = 0; col < cWidth; col++) {
+      cb[row * ycbr.cstride + col * ycbr.chroma_step] =
+          static_cast<uint8_t>((float(col) / float(cWidth)) * 255.f);
+      cr[row * ycbr.cstride + col * ycbr.chroma_step] =
+          static_cast<uint8_t>((float(row) / float(cHeight)) * 255.f);
     }
   }
 }
 
 }  // namespace
 
-// This is just to see some meaningfull image in the buffer for testing, only
-// works with YcbCr420.
-void renderTestPatternYCbCr420(const std::shared_ptr<AHardwareBuffer> buffer,
-                               const int frameNumber, const int fence) {
-  AHardwareBuffer_Planes planes_info;
-
-  AHardwareBuffer_Desc hwBufferDesc;
-  AHardwareBuffer_describe(buffer.get(), &hwBufferDesc);
-
-  const int width = hwBufferDesc.width;
-  const int height = hwBufferDesc.height;
-
-  int result = AHardwareBuffer_lockPlanes(buffer.get(),
-                                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
-                                          fence, nullptr, &planes_info);
-  if (result != OK) {
-    ALOGE("%s: Failed to lock planes: %d", __func__, result);
+void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
+  if (surface == nullptr) {
+    ALOGE("%s: null surface, skipping render", __func__);
     return;
   }
 
-  renderTestPatternYcbCr420(
-      reinterpret_cast<uint8_t*>(planes_info.planes[0].data), width, height,
-      frameNumber);
+  ANativeWindowBuffer* buffer;
+  int fenceFd;
+  int ret = ANativeWindow_dequeueBuffer(surface.get(), &buffer, &fenceFd);
+  if (ret != NO_ERROR) {
+    ALOGE(
+        "%s: Error while deuqueing buffer from surface, "
+        "ANativeWindow_dequeueBuffer returned %d",
+        __func__, ret);
+    return;
+  }
 
-  AHardwareBuffer_unlock(buffer.get(), nullptr);
-}
+  if (buffer == nullptr) {
+    ALOGE("%s: ANativeWindowBuffer is null after dequeing", __func__);
+    return;
+  }
 
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
-  ANativeWindow_Buffer buffer;
-  surface->lock(&buffer, nullptr);
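+  // Wait for the acquire fence (bounded by kAcquireFenceTimeout) before the
+  // CPU writes into the dequeued buffer.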
+  sp<Fence> fence = sp<Fence>::make(fenceFd);
+  if (fence->isValid()) {
+    ret = fence->wait(kAcquireFenceTimeout.count());
+    if (ret != NO_ERROR) {
+      ALOGE("%s: Timeout while waiting for the fence to clear", __func__);
+      ANativeWindow_queueBuffer(surface.get(), buffer, fence->dup());
+      return;
+    }
+  }
 
-  ALOGV("buffer: %dx%d stride %d, pixfmt %d", buffer.width, buffer.height,
-        buffer.stride, buffer.format);
+  sp<GraphicBuffer> gBuffer = GraphicBuffer::from(buffer);
+  android_ycbcr ycbr;
 
-  renderTestPatternYcbCr420(reinterpret_cast<uint8_t*>(buffer.bits),
-                            buffer.width, buffer.height, frameNumber);
+  ret = gBuffer->lockAsyncYCbCr(GraphicBuffer::USAGE_SW_WRITE_OFTEN, &ycbr,
+                                fence->dup());
+  if (ret != NO_ERROR) {
+    ALOGE("%s: Failed to lock buffer retrieved from surface, ret %d", __func__,
+          ret);
+    return;
+  }
 
-  surface->unlockAndPost();
+  renderTestPatternYcbCr420(ycbr, gBuffer->getWidth(), gBuffer->getHeight(),
+                            frameNumber);
+
+  ret = gBuffer->unlock();
+  if (ret != NO_ERROR) {
+    ALOGE("%s: Failed to unlock buffer, ret %d", __func__, ret);
+    return;
+  }
+
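+  // The CPU write is finished after unlock(), so the buffer is queued without a release fence.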
+  ret = ANativeWindow_queueBuffer(surface.get(), buffer, /*fenceFd=*/-1);
+  if (ret != NO_ERROR) {
+    ALOGE(
+        "%s: Error while queing buffer to surface, ANativeWindow_queueBuffer "
+        "returned %d",
+        __func__, ret);
+    return;
+  }
 }
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
index aca1cdd..f842b29 100644
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ b/services/camera/virtualcamera/util/TestPatternHelper.h
@@ -17,20 +17,12 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
 
-#include <memory>
-
-#include "android/hardware_buffer.h"
 #include "gui/Surface.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
-// Helper function filling hardware buffer with test pattern for debugging /
-// testing purposes.
-void renderTestPatternYCbCr420(std::shared_ptr<AHardwareBuffer> buffer,
-                               int frameNumber, int fence = -1);
-
 // Helper function for rendering test pattern into Surface.
 void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
 
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index df771b1..0c607d7 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -20,8 +20,13 @@
 
 #include <algorithm>
 #include <array>
+#include <cstdint>
+#include <memory>
 
+#include "android/hardware_buffer.h"
 #include "jpeglib.h"
+#include "ui/GraphicBuffer.h"
+#include "utils/Errors.h"
 
 namespace android {
 namespace companion {
@@ -35,10 +40,87 @@
 // TODO(b/301023410) - Query actual max texture size.
 constexpr int kMaxTextureSize = 2048;
 constexpr int kLibJpegDctSize = DCTSIZE;
+constexpr int kMaxFpsUpperLimit = 60;
 
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
                                                   Format::RGBA_8888};
 
+YCbCrLockGuard::YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                               const uint32_t usageFlags)
+    : mHwBuffer(hwBuffer) {
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+  mLockStatus = gBuffer->lockYCbCr(usageFlags, &mYCbCr);
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+YCbCrLockGuard::~YCbCrLockGuard() {
+  if (getStatus() != OK) {
+    return;
+  }
+
+  GraphicBuffer* gBuffer = GraphicBuffer::fromAHardwareBuffer(mHwBuffer.get());
+  if (gBuffer == nullptr) {
+    return;
+  }
+  status_t status = gBuffer->unlock();
+  if (status != NO_ERROR) {
+    ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
+  }
+}
+
+status_t YCbCrLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const android_ycbcr& YCbCrLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked YCbCrLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mYCbCr;
+}
+
+PlanesLockGuard::PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                                 const uint64_t usageFlags, sp<Fence> fence)
+    : mHwBuffer(hwBuffer) {
+  if (hwBuffer == nullptr) {
+    ALOGE("%s: Attempting to lock nullptr buffer.", __func__);
+    return;
+  }
+
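+  // A raw fd of -1 tells AHardwareBuffer_lockPlanes not to wait on any acquire fence.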
+  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
+      hwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
+  if (mLockStatus != OK) {
+    ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
+          statusToString(mLockStatus).c_str());
+  }
+}
+
+PlanesLockGuard::~PlanesLockGuard() {
+  if (getStatus() != OK || mHwBuffer == nullptr) {
+    return;
+  }
+  AHardwareBuffer_unlock(mHwBuffer.get(), /*fence=*/nullptr);
+}
+
+status_t PlanesLockGuard::getStatus() const {
+  return mLockStatus;
+}
+
+const AHardwareBuffer_Planes& PlanesLockGuard::operator*() const {
+  LOG_ALWAYS_FATAL_IF(getStatus() != OK,
+                      "Dereferencing unlocked PlanesLockGuard, status is %s",
+                      statusToString(mLockStatus).c_str());
+  return mPlanes;
+}
+
 sp<Fence> importFence(const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.size() != 1) {
     return sp<Fence>::make();
@@ -54,7 +136,7 @@
 
 // Returns true if specified format is supported for virtual camera input.
 bool isFormatSupportedForInput(const int width, const int height,
-                               const Format format) {
+                               const Format format, const int maxFps) {
   if (!isPixelFormatSupportedForInput(format)) {
     return false;
   }
@@ -64,16 +146,17 @@
     return false;
   }
 
-  if (width % kLibJpegDctSize != 0 || height % kLibJpegDctSize != 0) {
-    // Input dimension needs to be multiple of libjpeg DCT size.
-    // TODO(b/301023410) This restriction can be removed once we add support for
-    // unaligned jpeg compression.
+  if (maxFps <= 0 || maxFps > kMaxFpsUpperLimit) {
     return false;
   }
 
   return true;
 }
 
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution) {
+  return os << resolution.width << "x" << resolution.height;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/Util.h b/services/camera/virtualcamera/util/Util.h
index a73c99b..291e105 100644
--- a/services/camera/virtualcamera/util/Util.h
+++ b/services/camera/virtualcamera/util/Util.h
@@ -17,18 +17,84 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_UTIL_H
 
+#include <cmath>
 #include <cstdint>
+#include <memory>
+#include <optional>
+#include <string>
 
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/hardware/camera/common/Status.h"
 #include "aidl/android/hardware/camera/device/StreamBuffer.h"
 #include "android/binder_auto_utils.h"
+#include "android/hardware_buffer.h"
+#include "system/graphics.h"
 #include "ui/Fence.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
+// RAII utility class to safely lock AHardwareBuffer and obtain android_ycbcr
+// structure describing YUV plane layout.
+//
+// Access to the buffer is locked immediately after construction.
+class YCbCrLockGuard {
+ public:
+  YCbCrLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer, uint32_t usageFlags);
+  YCbCrLockGuard(YCbCrLockGuard&& other) = default;
+  ~YCbCrLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the android_ycbcr structure
+  // describing the layout.
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const android_ycbcr& operator*() const;
+
+  // Disable copy.
+  YCbCrLockGuard(const YCbCrLockGuard&) = delete;
+  YCbCrLockGuard& operator=(const YCbCrLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  android_ycbcr mYCbCr = {};
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
+// RAII utility class to safely lock AHardwareBuffer and obtain
+// AHardwareBuffer_Planes (suitable for interacting with RGBA / BLOB buffers).
+//
+// Access to the buffer is locked immediately after construction.
+class PlanesLockGuard {
+ public:
+  PlanesLockGuard(std::shared_ptr<AHardwareBuffer> hwBuffer,
+                  uint64_t usageFlags, sp<Fence> fence = nullptr);
+  PlanesLockGuard(PlanesLockGuard&& other) = default;
+  ~PlanesLockGuard();
+
+  // Returns OK if the buffer is successfully locked.
+  status_t getStatus() const;
+
+  // Dereferencing an instance of this guard returns the AHardwareBuffer_Planes
+  // structure describing the layout.
+  //
+  // Caller needs to check whether the buffer was successfully locked
+  // before dereferencing.
+  const AHardwareBuffer_Planes& operator*() const;
+
+  // Disable copy.
+  PlanesLockGuard(const PlanesLockGuard&) = delete;
+  PlanesLockGuard& operator=(const PlanesLockGuard&) = delete;
+
+ private:
+  std::shared_ptr<AHardwareBuffer> mHwBuffer;
+  AHardwareBuffer_Planes mPlanes;
+  status_t mLockStatus = DEAD_OBJECT;
+};
+
 // Converts camera AIDL status to ndk::ScopedAStatus
 inline ndk::ScopedAStatus cameraStatus(
     const ::aidl::android::hardware::camera::common::Status status) {
@@ -50,7 +116,66 @@
 // Returns true if specified format is supported for virtual camera input.
 bool isFormatSupportedForInput(
     int width, int height,
-    ::aidl::android::companion::virtualcamera::Format format);
+    ::aidl::android::companion::virtualcamera::Format format, int maxFps);
+
+// Representation of resolution / size.
+struct Resolution {
+  Resolution() = default;
+  Resolution(const int w, const int h) : width(w), height(h) {
+  }
+
+  // Order by increasing pixel count, and by width for same pixel count.
+  bool operator<(const Resolution& other) const {
+    const int pixCount = width * height;
+    const int otherPixCount = other.width * other.height;
+    return pixCount == otherPixCount ? width < other.width
+                                     : pixCount < otherPixCount;
+  }
+
+  bool operator<=(const Resolution& other) const {
+    return *this == other || *this < other;
+  }
+
+  bool operator==(const Resolution& other) const {
+    return width == other.width && height == other.height;
+  }
+
+  int width = 0;
+  int height = 0;
+};
+
+struct FpsRange {
+  int32_t minFps;
+  int32_t maxFps;
+
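+  // Order primarily by maxFps, then by minFps when maxFps is equal.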
+  bool operator<(const FpsRange& other) const {
+    return maxFps == other.maxFps ? minFps < other.minFps
+                                  : maxFps < other.maxFps;
+  }
+};
+
+struct GpsCoordinates {
+  // Represented by a double[] in metadata, with index 0 for latitude,
+  // index 1 for longitude, and index 2 for altitude.
+  double_t latitude;
+  double_t longitude;
+  double_t altitude;
+  std::optional<int64_t> timestamp;
+  std::string provider;
+};
+
+inline bool isApproximatellySameAspectRatio(const Resolution r1,
+                                            const Resolution r2) {
+  static constexpr float kAspectRatioEpsilon = 0.05;
+  float aspectRatio1 =
+      static_cast<float>(r1.width) / static_cast<float>(r1.height);
+  float aspectRatio2 =
+      static_cast<float>(r2.width) / static_cast<float>(r2.height);
+
+  return std::abs(aspectRatio1 - aspectRatio2) < kAspectRatioEpsilon;
+}
+
+std::ostream& operator<<(std::ostream& os, const Resolution& resolution);
 
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/medialog/fuzzer/Android.bp b/services/medialog/fuzzer/Android.bp
index 84a1ce6..6b4ee5f 100644
--- a/services/medialog/fuzzer/Android.bp
+++ b/services/medialog/fuzzer/Android.bp
@@ -1,4 +1,5 @@
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 6b48075..5b4fca9 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -37,6 +37,8 @@
 #include "AAudioServiceEndpointPlay.h"
 #include "AAudioServiceEndpointMMAP.h"
 
+#include <com_android_media_aaudio.h>
+
 #define AAUDIO_BUFFER_CAPACITY_MIN    (4 * 512)
 #define AAUDIO_SAMPLE_RATE_DEFAULT    48000
 
@@ -148,9 +150,15 @@
 
         // Try other formats if the config from APM is the same as our current config.
         // Some HALs may report its format support incorrectly.
-        if ((previousConfig.format == config.format) &&
-                (previousConfig.sample_rate == config.sample_rate)) {
-            config.format = getNextFormatToTry(config.format);
+        if (previousConfig.format == config.format) {
+            if (previousConfig.sample_rate == config.sample_rate) {
+                config.format = getNextFormatToTry(config.format);
+            } else if (!com::android::media::aaudio::sample_rate_conversion()) {
+                ALOGI("%s() - AAudio SRC feature not enabled, different rates! %d != %d",
+                      __func__, previousConfig.sample_rate, config.sample_rate);
+                result = AAUDIO_ERROR_INVALID_RATE;
+                break;
+            }
         }
 
         ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index d9e7e2b..dc70c79 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -75,11 +75,7 @@
                         this, getState());
 
     // Stop the command thread before destroying.
-    if (mThreadEnabled) {
-        mThreadEnabled = false;
-        mCommandQueue.stopWaiting();
-        mCommandThread.stop();
-    }
+    stopCommandThread();
 }
 
 std::string AAudioServiceStreamBase::dumpHeader() {
@@ -194,26 +190,27 @@
 
 error:
     closeAndClear();
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close() {
     aaudio_result_t result = sendCommand(CLOSE, nullptr, true /*waitForReply*/, TIMEOUT_NANOS);
+    if (result == AAUDIO_ERROR_ALREADY_CLOSED) {
+        // AAUDIO_ERROR_ALREADY_CLOSED is not really an error; it just indicates that the stream
+        // has already been closed. In that case, there is no need to close the stream again.
+        ALOGD("The stream(%d) is already closed", mHandle);
+        return AAUDIO_OK;
+    }
 
-    // Stop the command thread as the stream is closed.
-    mThreadEnabled = false;
-    mCommandQueue.stopWaiting();
-    mCommandThread.stop();
+    stopCommandThread();
 
     return result;
 }
 
 aaudio_result_t AAudioServiceStreamBase::close_l() {
     if (getState() == AAUDIO_STREAM_STATE_CLOSED) {
-        return AAUDIO_OK;
+        return AAUDIO_ERROR_ALREADY_CLOSED;
     }
 
     // This will stop the stream, just in case it was not already stopped.
@@ -766,3 +763,11 @@
         .record();
     return result;
 }
+
+void AAudioServiceStreamBase::stopCommandThread() {
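+    // compare_exchange_strong() makes this idempotent: only the first caller (e.g. close() or
+    // the destructor) actually stops the command queue and thread.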
+    bool threadEnabled = true;
+    if (mThreadEnabled.compare_exchange_strong(threadEnabled, false)) {
+        mCommandQueue.stopWaiting();
+        mCommandThread.stop();
+    }
+}
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index d5061b3..96a6d44 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -360,7 +360,7 @@
         EXIT_STANDBY,
     };
     AAudioThread            mCommandThread;
-    std::atomic<bool>       mThreadEnabled{false};
+    std::atomic_bool        mThreadEnabled{false};
     AAudioCommandQueue      mCommandQueue;
 
     int32_t                 mFramesPerBurst = 0;
@@ -400,6 +400,8 @@
                                 bool waitForReply = false,
                                 int64_t timeoutNanos = 0);
 
+    void stopCommandThread();
+
     aaudio_result_t closeAndClear();
 
     /**
diff --git a/services/oboeservice/fuzzer/Android.bp b/services/oboeservice/fuzzer/Android.bp
index 36a91a9..97825b3 100644
--- a/services/oboeservice/fuzzer/Android.bp
+++ b/services/oboeservice/fuzzer/Android.bp
@@ -19,6 +19,7 @@
  */
 
 package {
+    default_team: "trendy_team_media_framework_audio",
     // See: http://go/android-license-faq
     // A large-scale-change added 'default_applicable_licenses' to import
     // all of the 'license_kinds' from "frameworks_av_license"
@@ -37,6 +38,7 @@
     ],
     shared_libs: [
         "aaudio-aidl-cpp",
+        "com.android.media.aaudio-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
diff --git a/services/tuner/TunerDemux.cpp b/services/tuner/TunerDemux.cpp
index 92fa970..a80a88e 100644
--- a/services/tuner/TunerDemux.cpp
+++ b/services/tuner/TunerDemux.cpp
@@ -50,7 +50,9 @@
 }
 
 TunerDemux::~TunerDemux() {
-    close();
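+    // Only close the underlying demux if the client has not already closed it explicitly.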
+    if (!isClosed) {
+        close();
+    }
     mDemux = nullptr;
     mTunerService = nullptr;
 }
@@ -125,6 +127,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDemux::close() {
+    isClosed = true;
     return mDemux->close();
 }
 
diff --git a/services/tuner/TunerDemux.h b/services/tuner/TunerDemux.h
index 0c71987..17dd7e0 100644
--- a/services/tuner/TunerDemux.h
+++ b/services/tuner/TunerDemux.h
@@ -64,6 +64,7 @@
     shared_ptr<IDemux> mDemux;
     int mDemuxId;
     shared_ptr<TunerService> mTunerService;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDescrambler.cpp b/services/tuner/TunerDescrambler.cpp
index ffe0be9..c1214bd 100644
--- a/services/tuner/TunerDescrambler.cpp
+++ b/services/tuner/TunerDescrambler.cpp
@@ -41,7 +41,9 @@
 }
 
 TunerDescrambler::~TunerDescrambler() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mDescrambler = nullptr;
 }
 
@@ -75,6 +77,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDescrambler::close() {
+    isClosed = true;
     return mDescrambler->close();
 }
 
diff --git a/services/tuner/TunerDescrambler.h b/services/tuner/TunerDescrambler.h
index b1d5fb9..434fc5d 100644
--- a/services/tuner/TunerDescrambler.h
+++ b/services/tuner/TunerDescrambler.h
@@ -48,6 +48,7 @@
 
 private:
     shared_ptr<IDescrambler> mDescrambler;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerDvr.cpp b/services/tuner/TunerDvr.cpp
index fcee966..0e1b0fa 100644
--- a/services/tuner/TunerDvr.cpp
+++ b/services/tuner/TunerDvr.cpp
@@ -37,7 +37,9 @@
 }
 
 TunerDvr::~TunerDvr() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mDvr = nullptr;
 }
 
@@ -92,6 +94,7 @@
 }
 
 ::ndk::ScopedAStatus TunerDvr::close() {
+    isClosed = true;
     return mDvr->close();
 }
 
diff --git a/services/tuner/TunerDvr.h b/services/tuner/TunerDvr.h
index 2330e7b..1fb7a5c 100644
--- a/services/tuner/TunerDvr.h
+++ b/services/tuner/TunerDvr.h
@@ -77,6 +77,7 @@
 private:
     shared_ptr<IDvr> mDvr;
     DvrType mType;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 478e7ea..84a2b4e 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -47,7 +47,9 @@
         mTunerService(tuner) {}
 
 TunerFilter::~TunerFilter() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     freeSharedFilterToken("");
     {
         Mutex::Autolock _l(mLock);
@@ -266,6 +268,7 @@
     mStarted = false;
     mShared = false;
     mClientPid = -1;
+    isClosed = true;
 
     return res;
 }
diff --git a/services/tuner/TunerFilter.h b/services/tuner/TunerFilter.h
index f6178c4..06735aa 100644
--- a/services/tuner/TunerFilter.h
+++ b/services/tuner/TunerFilter.h
@@ -116,6 +116,7 @@
     shared_ptr<FilterCallback> mFilterCallback;
     Mutex mLock;
     shared_ptr<TunerService> mTunerService;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerFrontend.cpp b/services/tuner/TunerFrontend.cpp
index 1e93d95..081596a 100644
--- a/services/tuner/TunerFrontend.cpp
+++ b/services/tuner/TunerFrontend.cpp
@@ -37,7 +37,9 @@
 }
 
 TunerFrontend::~TunerFrontend() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mFrontend = nullptr;
     mId = -1;
 }
@@ -89,6 +91,7 @@
 }
 
 ::ndk::ScopedAStatus TunerFrontend::close() {
+    isClosed = true;
     return mFrontend->close();
 }
 
diff --git a/services/tuner/TunerFrontend.h b/services/tuner/TunerFrontend.h
index da471fb..9612124 100644
--- a/services/tuner/TunerFrontend.h
+++ b/services/tuner/TunerFrontend.h
@@ -83,6 +83,7 @@
 private:
     int mId;
     shared_ptr<IFrontend> mFrontend;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerLnb.cpp b/services/tuner/TunerLnb.cpp
index 2fb6135..d27a978 100644
--- a/services/tuner/TunerLnb.cpp
+++ b/services/tuner/TunerLnb.cpp
@@ -36,7 +36,9 @@
 }
 
 TunerLnb::~TunerLnb() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mLnb = nullptr;
     mId = -1;
 }
@@ -70,6 +72,7 @@
 }
 
 ::ndk::ScopedAStatus TunerLnb::close() {
+    isClosed = true;
     return mLnb->close();
 }
 
diff --git a/services/tuner/TunerLnb.h b/services/tuner/TunerLnb.h
index 72988a6..b0222d7 100644
--- a/services/tuner/TunerLnb.h
+++ b/services/tuner/TunerLnb.h
@@ -66,6 +66,7 @@
 private:
     int mId;
     shared_ptr<ILnb> mLnb;
+    bool isClosed = false;
 };
 
 }  // namespace tuner
diff --git a/services/tuner/TunerTimeFilter.cpp b/services/tuner/TunerTimeFilter.cpp
index 385a063..7a4e200 100644
--- a/services/tuner/TunerTimeFilter.cpp
+++ b/services/tuner/TunerTimeFilter.cpp
@@ -35,7 +35,9 @@
 }
 
 TunerTimeFilter::~TunerTimeFilter() {
-    close();
+    if (!isClosed) {
+        close();
+    }
     mTimeFilter = nullptr;
 }
 
@@ -64,6 +66,7 @@
 }
 
 ::ndk::ScopedAStatus TunerTimeFilter::close() {
+    isClosed = true;
     return mTimeFilter->close();
 }
 
diff --git a/services/tuner/TunerTimeFilter.h b/services/tuner/TunerTimeFilter.h
index 31a47cd..7e40ebe 100644
--- a/services/tuner/TunerTimeFilter.h
+++ b/services/tuner/TunerTimeFilter.h
@@ -45,6 +45,7 @@
 
 private:
     shared_ptr<ITimeFilter> mTimeFilter;
+    bool isClosed = false;
 };
 
 }  // namespace tuner