[automerger skipped] Merge "AudioFlinger: Update AudioFlinger class includes" into udc-qpr-dev-plus-aosp am: d89d56ff8a -s ours

am skip reason: Merged-In Idfa767f596ef6c6155664d06cbf177183cc08155 with SHA-1 9a57fc3408 is already in history

Original change: https://googleplex-android-review.googlesource.com/c/platform/frameworks/av/+/24427809

Change-Id: Iae0a3d78990feab320ea3d5c037db143b4aadde2
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 2244682..35b8e21 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -19,7 +19,6 @@
 #define LOG_TAG "Camera"
 #include <utils/Log.h>
 #include <utils/threads.h>
-#include <utils/String16.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
 #include <binder/IMemory.h>
@@ -70,7 +69,7 @@
     // deadlock if we call any method of ICamera here.
 }
 
-sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
+sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
         int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
         bool forceSlowJpegMode)
 {
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 9ae4607..2a102d0 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -31,6 +31,7 @@
 
 #include <camera/CameraBase.h>
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 
 // needed to instantiate
 #include <camera/Camera.h>
@@ -58,7 +59,7 @@
 }
 
 status_t CameraStatus::writeToParcel(android::Parcel* parcel) const {
-    auto res = parcel->writeString16(String16(cameraId));
+    auto res = parcel->writeString16(toString16(cameraId));
     if (res != OK) return res;
 
     res = parcel->writeInt32(status);
@@ -66,12 +67,12 @@
 
     std::vector<String16> unavailablePhysicalIds16;
     for (auto& id8 : unavailablePhysicalIds) {
-        unavailablePhysicalIds16.push_back(String16(id8));
+        unavailablePhysicalIds16.push_back(toString16(id8));
     }
     res = parcel->writeString16Vector(unavailablePhysicalIds16);
     if (res != OK) return res;
 
-    res = parcel->writeString16(String16(clientPackage));
+    res = parcel->writeString16(toString16(clientPackage));
     return res;
 }
 
@@ -79,7 +80,7 @@
     String16 tempCameraId;
     auto res = parcel->readString16(&tempCameraId);
     if (res != OK) return res;
-    cameraId = String8(tempCameraId);
+    cameraId = toString8(tempCameraId);
 
     res = parcel->readInt32(&status);
     if (res != OK) return res;
@@ -88,13 +89,13 @@
     res = parcel->readString16Vector(&unavailablePhysicalIds16);
     if (res != OK) return res;
     for (auto& id16 : unavailablePhysicalIds16) {
-        unavailablePhysicalIds.push_back(String8(id16));
+        unavailablePhysicalIds.push_back(toStdString(id16));
     }
 
     String16 tempClientPackage;
     res = parcel->readString16(&tempClientPackage);
     if (res != OK) return res;
-    clientPackage = String8(tempClientPackage);
+    clientPackage = toStdString(tempClientPackage);
 
     return res;
 }
@@ -103,7 +104,6 @@
 
 namespace {
     sp<::android::hardware::ICameraService> gCameraService;
-    const int                 kCameraServicePollDelay = 500000; // 0.5s
     const char*               kCameraServiceName      = "media.camera";
 
     Mutex                     gLock;
@@ -141,14 +141,10 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16(kCameraServiceName));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->waitForService(toString16(kCameraServiceName));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (gDeathNotifier == NULL) {
             gDeathNotifier = new DeathNotifier();
         }
@@ -161,7 +157,7 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                               const String16& clientPackageName,
+                                               const std::string& clientPackageName,
                                                int clientUid, int clientPid, int targetSdkVersion,
                                                bool overrideToPortrait, bool forceSlowJpegMode)
 {
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 9e9793d..36bf24c 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -20,6 +20,7 @@
 #include <utils/String16.h>
 
 #include <camera/CameraSessionStats.h>
+#include <camera/StringUtils.h>
 
 #include <binder/Parcel.h>
 
@@ -282,8 +283,8 @@
         mSessionIndex(0),
         mCameraExtensionSessionStats() {}
 
-CameraSessionStats::CameraSessionStats(const String16& cameraId,
-        int facing, int newCameraState, const String16& clientName,
+CameraSessionStats::CameraSessionStats(const std::string& cameraId,
+        int facing, int newCameraState, const std::string& clientName,
         int apiLevel, bool isNdk, int32_t latencyMs, int64_t logId) :
                 mCameraId(cameraId),
                 mFacing(facing),
@@ -425,10 +426,10 @@
         return err;
     }
 
-    mCameraId = id;
+    mCameraId = toStdString(id);
     mFacing = facing;
     mNewCameraState = newCameraState;
-    mClientName = clientName;
+    mClientName = toStdString(clientName);
     mApiLevel = apiLevel;
     mIsNdk = isNdk;
     mLatencyMs = latencyMs;
@@ -440,7 +441,7 @@
     mResultErrorCount = resultErrorCount;
     mDeviceError = deviceError;
     mStreamStats = std::move(streamStats);
-    mUserTag = userTag;
+    mUserTag = toStdString(userTag);
     mVideoStabilizationMode = videoStabilizationMode;
     mSessionIndex = sessionIdx;
     mCameraExtensionSessionStats = extStats;
@@ -456,7 +457,7 @@
 
     status_t err = OK;
 
-    if ((err = parcel->writeString16(mCameraId)) != OK) {
+    if ((err = parcel->writeString16(toString16(mCameraId))) != OK) {
         ALOGE("%s: Failed to write camera id!", __FUNCTION__);
         return err;
     }
@@ -471,7 +472,7 @@
         return err;
     }
 
-    if ((err = parcel->writeString16(mClientName)) != OK) {
+    if ((err = parcel->writeString16(toString16(mClientName))) != OK) {
         ALOGE("%s: Failed to write client name!", __FUNCTION__);
         return err;
     }
@@ -531,7 +532,7 @@
         return err;
     }
 
-    if ((err = parcel->writeString16(mUserTag)) != OK) {
+    if ((err = parcel->writeString16(toString16(mUserTag))) != OK) {
         ALOGE("%s: Failed to write user tag!", __FUNCTION__);
         return err;
     }
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index bb880d1..9ff2578 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -18,6 +18,7 @@
 #include <utils/Log.h>
 
 #include <camera/CaptureResult.h>
+#include <camera/StringUtils.h>
 #include <binder/Parcel.h>
 
 namespace android {
@@ -47,7 +48,7 @@
             ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
             return res;
         }
-        errorPhysicalCameraId = cameraId;
+        errorPhysicalCameraId = toStdString(cameraId);
     }
     parcel->readInt64(&lastCompletedRegularFrameNumber);
     parcel->readInt64(&lastCompletedReprocessFrameNumber);
@@ -75,7 +76,7 @@
     if (errorPhysicalCameraId.size() > 0) {
         parcel->writeBool(true);
         status_t res = OK;
-        if ((res = parcel->writeString16(errorPhysicalCameraId)) != OK) {
+        if ((res = parcel->writeString16(toString16(errorPhysicalCameraId))) != OK) {
             ALOGE("%s: Failed to write physical camera ID to parcel: %d", __FUNCTION__, res);
             return res;
         }
@@ -96,13 +97,15 @@
 status_t PhysicalCaptureResultInfo::readFromParcel(const android::Parcel* parcel) {
     status_t res;
 
-    mPhysicalCameraId.setTo(u"");
+    mPhysicalCameraId = "";
     mPhysicalCameraMetadata.clear();
 
-    if ((res = parcel->readString16(&mPhysicalCameraId)) != OK) {
+    String16 physicalCameraId;
+    if ((res = parcel->readString16(&physicalCameraId)) != OK) {
         ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
         return res;
     }
+    mPhysicalCameraId = toStdString(physicalCameraId);
 
     if ((res = mPhysicalCameraMetadata.readFromParcel(parcel)) != OK) {
         ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
@@ -113,7 +116,7 @@
 
 status_t PhysicalCaptureResultInfo::writeToParcel(android::Parcel* parcel) const {
     status_t res;
-    if ((res = parcel->writeString16(mPhysicalCameraId)) != OK) {
+    if ((res = parcel->writeString16(toString16(mPhysicalCameraId))) != OK) {
         ALOGE("%s: Failed to write physical camera ID to parcel: %d",
                 __FUNCTION__, res);
         return res;
@@ -187,7 +190,8 @@
             return res;
         }
 
-        mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), cameraId, physicalMetadata);
+        mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), toStdString(cameraId),
+                physicalMetadata);
     }
     ALOGV("%s: Read physical metadata from parcel", __FUNCTION__);
 
@@ -228,7 +232,7 @@
         return BAD_VALUE;
     }
     for (const auto& physicalMetadata : mPhysicalMetadatas) {
-        if ((res = parcel->writeString16(physicalMetadata.mPhysicalCameraId)) != OK) {
+        if ((res = parcel->writeString16(toString16(physicalMetadata.mPhysicalCameraId))) != OK) {
             ALOGE("%s: Failed to write physical camera ID to parcel: %d",
                     __FUNCTION__, res);
             return res;
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
index e875c53..453f696 100644
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ b/camera/aidl/android/hardware/CameraIdRemapping.aidl
@@ -30,17 +30,17 @@
      */
     parcelable PackageIdRemapping {
         /** Package Name (e.g. com.android.xyz). */
-        String packageName;
+        @utf8InCpp String packageName;
         /**
          * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
          * affected.
          */
-        List<String> cameraIdsToReplace;
+        @utf8InCpp List<String> cameraIdsToReplace;
         /**
          *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
          *  the updated camera id for cameraIdsToReplace[i].
          */
-        List<String> updatedCameraIds;
+        @utf8InCpp List<String> updatedCameraIds;
     }
 
     /**
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 01b8ff8..409a930 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -83,7 +83,7 @@
      */
     ICamera connect(ICameraClient client,
             int cameraId,
-            String opPackageName,
+            @utf8InCpp String opPackageName,
             int clientUid, int clientPid,
             int targetSdkVersion,
             boolean overrideToPortrait,
@@ -94,9 +94,9 @@
      * Only supported for device HAL versions >= 3.2
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
-            String cameraId,
-            String opPackageName,
-            @nullable String featureId,
+            @utf8InCpp String cameraId,
+            @utf8InCpp String opPackageName,
+            @nullable @utf8InCpp String featureId,
             int clientUid, int oomScoreOffset,
             int targetSdkVersion,
             boolean overrideToPortrait);
@@ -156,7 +156,7 @@
      * Read the static camera metadata for a camera device.
      * Only supported for device HAL versions >= 3.2
      */
-    CameraMetadataNative getCameraCharacteristics(String cameraId, int targetSdkVersion,
+    CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
             boolean overrideToPortrait);
 
     /**
@@ -177,7 +177,7 @@
     /**
      * Read the legacy camera1 parameters into a String
      */
-    String getLegacyParameters(int cameraId);
+    @utf8InCpp String getLegacyParameters(int cameraId);
 
     /**
      * apiVersion constants for supportsCameraApi
@@ -186,21 +186,21 @@
     const int API_VERSION_2 = 2;
 
     // Determines if a particular API version is supported directly for a cameraId.
-    boolean supportsCameraApi(String cameraId, int apiVersion);
+    boolean supportsCameraApi(@utf8InCpp String cameraId, int apiVersion);
     // Determines if a cameraId is a hidden physical camera of a logical multi-camera.
-    boolean isHiddenPhysicalCamera(String cameraId);
+    boolean isHiddenPhysicalCamera(@utf8InCpp String cameraId);
     // Inject the external camera to replace the internal camera session.
-    ICameraInjectionSession injectCamera(String packageName, String internalCamId,
-            String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
+    ICameraInjectionSession injectCamera(@utf8InCpp String packageName, @utf8InCpp String internalCamId,
+            @utf8InCpp String externalCamId, in ICameraInjectionCallback CameraInjectionCallback);
 
-    void setTorchMode(String cameraId, boolean enabled, IBinder clientBinder);
+    void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder);
 
     // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
     // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
-    void turnOnTorchWithStrengthLevel(String cameraId, int strengthLevel, IBinder clientBinder);
+    void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel, IBinder clientBinder);
 
     // Get the brightness level of the flash unit associated with cameraId.
-    int getTorchStrengthLevel(String cameraId);
+    int getTorchStrengthLevel(@utf8InCpp String cameraId);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -250,7 +250,7 @@
      *
      * @return the key that must be used to report updates to previously reported stats.
      */
-    String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
+    @utf8InCpp String reportExtensionSessionStats(in CameraExtensionSessionStats stats);
 
     // Bitfield constants for notifyDeviceStateChange
     // All bits >= 32 are for custom vendor states
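Note on the AIDL hunks above: annotating String arguments with @utf8InCpp makes the AIDL C++ backend generate std::string (UTF-8) parameters instead of android::String16, which is what lets the native call sites later in this patch drop their String16() wrapping. A minimal caller-side sketch under that assumption; the helper name and SDK value are illustrative and not part of the change:

    #include <string>

    #include <android/hardware/ICameraService.h>  // generated from the AIDL above
    #include <camera/CameraMetadata.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    // Illustrative only: with @utf8InCpp the generated proxy takes the camera id
    // directly as std::string, matching the updated ACameraManager.cpp call site.
    static binder::Status queryCharacteristics(const sp<hardware::ICameraService>& cs,
            const std::string& cameraId, CameraMetadata* out) {
        return cs->getCameraCharacteristics(cameraId, /*targetSdkVersion*/ 34,
                /*overrideToPortrait*/ false, out);
    }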
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index 5f17f5b..23a87d3 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -51,13 +51,14 @@
     // Use to initialize variables only
     const int STATUS_UNKNOWN          = -1;
 
-    oneway void onStatusChanged(int status, String cameraId);
+    oneway void onStatusChanged(int status, @utf8InCpp String cameraId);
 
     /**
      * Notify registered client about status changes for a physical camera backing
      * a logical camera.
      */
-    oneway void onPhysicalCameraStatusChanged(int status, String cameraId, String physicalCameraId);
+    oneway void onPhysicalCameraStatusChanged(int status, @utf8InCpp String cameraId,
+            @utf8InCpp String physicalCameraId);
 
     /**
      * The torch mode status of a camera.
@@ -81,9 +82,9 @@
     // Use to initialize variables only
     const int TORCH_STATUS_UNKNOWN = -1;
 
-    oneway void onTorchStatusChanged(int status, String cameraId);
+    oneway void onTorchStatusChanged(int status, @utf8InCpp String cameraId);
 
-    oneway void onTorchStrengthLevelChanged(String cameraId, int newTorchStrength);
+    oneway void onTorchStrengthLevelChanged(@utf8InCpp String cameraId, int newTorchStrength);
 
     /**
      * Notify registered clients about camera access priority changes.
@@ -97,6 +98,6 @@
      * Only clients with android.permission.CAMERA_OPEN_CLOSE_LISTENER permission
      * will receive such callbacks.
      */
-    oneway void onCameraOpened(String cameraId, String clientPackageId);
-    oneway void onCameraClosed(String cameraId);
+    oneway void onCameraOpened(@utf8InCpp String cameraId, @utf8InCpp String clientPackageId);
+    oneway void onCameraClosed(@utf8InCpp String cameraId);
 }
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 4faa6b4..dcd69b0 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -44,14 +44,14 @@
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_180},
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_270}).
      */
-    int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
+    int getRotateAndCropOverride(@utf8InCpp String packageName, int lensFacing, int userId);
 
     /**
      * Returns the necessary autoframing override for the top activity which
      * will be one of ({@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_FALSE},
      * {@link android.hardware.camera2.CameraMetadata#AUTOFRAMING_TRUE}).
      */
-    int getAutoframingOverride(String packageName);
+    int getAutoframingOverride(@utf8InCpp String packageName);
 
     /**
      * Checks if the camera has been disabled via device policy.
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 7a8a4ba..071f34e 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -21,6 +21,7 @@
 #include <utils/String16.h>
 
 #include <camera/camera2/CaptureRequest.h>
+#include <camera/StringUtils.h>
 
 #include <binder/Parcel.h>
 #include <gui/Surface.h>
@@ -74,7 +75,7 @@
             return err;
         }
         ALOGV("%s: Read metadata from parcel", __FUNCTION__);
-        mPhysicalCameraSettings.push_back({std::string(String8(id).string()), settings});
+        mPhysicalCameraSettings.push_back({toStdString(id), settings});
     }
 
     int isReprocess = 0;
@@ -157,7 +158,7 @@
             ALOGE("%s: Failed to read user tag!", __FUNCTION__);
             return BAD_VALUE;
         }
-        mUserTag = String8(userTag).c_str();
+        mUserTag = toStdString(userTag);
     }
 
     return OK;
@@ -179,7 +180,7 @@
     }
 
     for (const auto &it : mPhysicalCameraSettings) {
-        if ((err = parcel->writeString16(String16(it.id.c_str()))) != OK) {
+        if ((err = parcel->writeString16(toString16(it.id))) != OK) {
             ALOGE("%s: Failed to camera id!", __FUNCTION__);
             return err;
         }
@@ -232,7 +233,7 @@
         parcel->writeInt32(0);
     } else {
         parcel->writeInt32(1);
-        parcel->writeString16(String16(mUserTag.c_str()));
+        parcel->writeString16(toString16(mUserTag));
     }
 
     return OK;
diff --git a/camera/camera2/ConcurrentCamera.cpp b/camera/camera2/ConcurrentCamera.cpp
index 01a695c..67aa876 100644
--- a/camera/camera2/ConcurrentCamera.cpp
+++ b/camera/camera2/ConcurrentCamera.cpp
@@ -20,6 +20,7 @@
 #include <utils/String16.h>
 
 #include <camera/camera2/ConcurrentCamera.h>
+#include <camera/StringUtils.h>
 
 #include <binder/Parcel.h>
 
@@ -53,7 +54,7 @@
             ALOGE("%s: Failed to read camera id!", __FUNCTION__);
             return err;
         }
-        mConcurrentCameraIds.push_back(std::string(String8(id).string()));
+        mConcurrentCameraIds.push_back(toStdString(id));
     }
     return OK;
 }
@@ -73,7 +74,7 @@
     }
 
     for (const auto &it : mConcurrentCameraIds) {
-        if ((err = parcel->writeString16(String16(it.c_str()))) != OK) {
+        if ((err = parcel->writeString16(toString16(it))) != OK) {
             ALOGE("%s: Failed to write the camera id string to parcel: %d", __FUNCTION__, err);
             return err;
         }
@@ -99,7 +100,7 @@
         ALOGE("%s: Failed to read sessionConfiguration!", __FUNCTION__);
         return err;
     }
-    mCameraId = std::string(String8(id).string());
+    mCameraId = toStdString(id);
     return OK;
 }
 
@@ -111,7 +112,7 @@
     }
 
     status_t err = OK;
-    if ((err = parcel->writeString16(String16(mCameraId.c_str()))) != OK) {
+    if ((err = parcel->writeString16(toString16(mCameraId))) != OK) {
         ALOGE("%s: Failed to write camera id!", __FUNCTION__);
         return err;
     }
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index da4484a..33220ce 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -21,6 +21,7 @@
 #include <utils/Log.h>
 
 #include <camera/camera2/OutputConfiguration.h>
+#include <camera/StringUtils.h>
 #include <binder/Parcel.h>
 #include <gui/view/Surface.h>
 #include <system/camera_metadata.h>
@@ -65,7 +66,7 @@
     return mIsShared;
 }
 
-String16 OutputConfiguration::getPhysicalCameraId() const {
+std::string OutputConfiguration::getPhysicalCameraId() const {
     return mPhysicalCameraId;
 }
 
@@ -183,7 +184,9 @@
         return err;
     }
 
-    parcel->readString16(&mPhysicalCameraId);
+    String16 physicalCameraId;
+    parcel->readString16(&physicalCameraId);
+    mPhysicalCameraId = toStdString(physicalCameraId);
 
     int isMultiResolution = 0;
     if ((err = parcel->readInt32(&isMultiResolution)) != OK) {
@@ -246,7 +249,7 @@
     for (auto& surface : surfaceShims) {
         ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                 surface.graphicBufferProducer.get(),
-                String8(surface.name).string());
+                toString8(surface.name).string());
         mGbps.push_back(surface.graphicBufferProducer);
     }
 
@@ -258,14 +261,14 @@
           " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
           ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
           __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
-          String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
+          mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
           mMirrorMode, mUseReadoutTimestamp);
 
     return err;
 }
 
 OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
-        const String16& physicalId,
+        const std::string& physicalId,
         int surfaceSetID, bool isShared) {
     mGbps.push_back(gbp);
     mRotation = rotation;
@@ -284,7 +287,7 @@
 
 OutputConfiguration::OutputConfiguration(
         const std::vector<sp<IGraphicBufferProducer>>& gbps,
-    int rotation, const String16& physicalCameraId, int surfaceSetID,  int surfaceType,
+    int rotation, const std::string& physicalCameraId, int surfaceSetID,  int surfaceType,
     int width, int height, bool isShared)
   : mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
     mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
@@ -331,7 +334,8 @@
     err = parcel->writeParcelableVector(surfaceShims);
     if (err != OK) return err;
 
-    err = parcel->writeString16(mPhysicalCameraId);
+    String16 physicalCameraId = toString16(mPhysicalCameraId);
+    err = parcel->writeString16(physicalCameraId);
     if (err != OK) return err;
 
     err = parcel->writeInt32(mIsMultiResolution ? 1 : 0);
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8472562..13b705c 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -26,12 +26,15 @@
 
     srcs: ["main_cameraserver.cpp"],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     header_libs: [
         "libmedia_headers",
     ],
 
     shared_libs: [
-        "libcameraservice",
         "liblog",
         "libutils",
         "libui",
@@ -40,15 +43,13 @@
         "libbinder_ndk",
         "libhidlbase",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
     ],
+    static_libs: [
+        "libcameraservice",
+    ],
     compile_multilib: "first",
     cflags: [
         "-Wall",
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 21b57af..6655f82 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -56,9 +56,9 @@
     typedef CameraListener                     TCamListener;
     typedef ::android::hardware::ICamera       TCamUser;
     typedef ::android::hardware::ICameraClient TCamCallbacks;
-    typedef ::android::binder::Status(::android::hardware::ICameraService::*TCamConnectService)
+    typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const String16&, int, int, int, bool, bool,
+        int, const std::string&, int, int, int, bool, bool,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -80,7 +80,7 @@
             // construct a camera client from an existing remote
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
-                                const String16& clientPackageName,
+                                const std::string& clientPackageName,
                                 int clientUid, int clientPid, int targetSdkVersion,
                                 bool overrideToPortrait, bool forceSlowJpegMode);
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index b20dc1b..6af7f2a 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -73,7 +73,7 @@
     /**
      * The name of the camera device
      */
-    String8 cameraId;
+    std::string cameraId;
 
     /**
      * Its current status, one of the ICameraService::STATUS_* fields
@@ -83,18 +83,18 @@
     /**
      * Unavailable physical camera names for a multi-camera device
      */
-    std::vector<String8> unavailablePhysicalIds;
+    std::vector<std::string> unavailablePhysicalIds;
 
     /**
      * Client package name if camera is open, otherwise not applicable
      */
-    String8 clientPackage;
+    std::string clientPackage;
 
     virtual status_t writeToParcel(android::Parcel* parcel) const;
     virtual status_t readFromParcel(const android::Parcel* parcel);
 
-    CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds,
-            const String8& clientPkg) : cameraId(id), status(s),
+    CameraStatus(std::string id, int32_t s, const std::vector<std::string>& unavailSubIds,
+            const std::string& clientPkg) : cameraId(id), status(s),
             unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
     CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
 };
@@ -118,7 +118,7 @@
     typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
-                                 const String16& clientPackageName,
+                                 const std::string& clientPackageName,
                                  int clientUid, int clientPid, int targetSdkVersion,
                                  bool overrideToPortrait, bool forceSlowJpegMode);
     virtual void         disconnect();
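For illustration, a short sketch of how the reworked CameraStatus round-trips its std::string fields through a Parcel, relying on the writeToParcel()/readFromParcel() implementations updated in CameraBase.cpp above (values below are made up):

    #include <binder/Parcel.h>
    #include <camera/CameraBase.h>

    using namespace android;
    using namespace android::hardware;  // assumed namespace of CameraStatus

    // Illustrative only: cameraId, unavailablePhysicalIds and clientPackage are
    // now plain std::string; they are written to the wire as String16.
    static void roundTripCameraStatus() {
        CameraStatus original;
        original.cameraId = "0";
        original.unavailablePhysicalIds = {"2", "3"};
        original.clientPackage = "com.example.app";  // hypothetical package name

        Parcel parcel;
        original.writeToParcel(&parcel);
        parcel.setDataPosition(0);

        CameraStatus restored;
        restored.readFromParcel(&parcel);  // restored fields now equal original's
    }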
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 071bc73..70ca0b3 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
 #define ANDROID_HARDWARE_CAMERA_SERVICE_SESSION_STATS_H
 
+#include <string>
+
 #include <binder/Parcelable.h>
 
 #include <camera/CameraMetadata.h>
@@ -121,10 +123,10 @@
     static const int CAMERA_API_LEVEL_1;
     static const int CAMERA_API_LEVEL_2;
 
-    String16 mCameraId;
+    std::string mCameraId;
     int mFacing;
     int mNewCameraState;
-    String16 mClientName;
+    std::string mClientName;
     int mApiLevel;
     bool mIsNdk;
     // latency in ms for camera open, close, or session creation.
@@ -157,7 +159,7 @@
     // Whether the device runs into an error state
     bool mDeviceError;
     std::vector<CameraStreamStats> mStreamStats;
-    String16 mUserTag;
+    std::string mUserTag;
     int mVideoStabilizationMode;
     int mSessionIndex;
 
@@ -165,8 +167,8 @@
 
     // Constructors
     CameraSessionStats();
-    CameraSessionStats(const String16& cameraId, int facing, int newCameraState,
-                       const String16& clientName, int apiLevel, bool isNdk, int32_t latencyMs,
+    CameraSessionStats(const std::string& cameraId, int facing, int newCameraState,
+                       const std::string& clientName, int apiLevel, bool isNdk, int32_t latencyMs,
                        int64_t logId);
 
     virtual status_t readFromParcel(const android::Parcel* parcel) override;
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index de534ab..e08c9ca 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -74,7 +74,7 @@
      * a reference to physical camera device.
      * Empty otherwise.
      */
-    String16  errorPhysicalCameraId;
+    std::string errorPhysicalCameraId;
 
     // The last completed frame numbers shouldn't be checked in onResultReceived() and notifyError()
     // because the output buffers could be arriving after onResultReceived() and
@@ -150,13 +150,13 @@
         : mPhysicalCameraId(),
           mPhysicalCameraMetadata() {
     }
-    PhysicalCaptureResultInfo(const String16& cameraId,
+    PhysicalCaptureResultInfo(const std::string& cameraId,
             const CameraMetadata& cameraMetadata)
             : mPhysicalCameraId(cameraId),
               mPhysicalCameraMetadata(cameraMetadata) {
     }
 
-    String16  mPhysicalCameraId;
+    std::string mPhysicalCameraId;
     CameraMetadata mPhysicalCameraMetadata;
 
     virtual status_t                readFromParcel(const android::Parcel* parcel) override;
diff --git a/camera/include/camera/StringUtils.h b/camera/include/camera/StringUtils.h
new file mode 100644
index 0000000..b9dfbfc
--- /dev/null
+++ b/camera/include/camera/StringUtils.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_STRINGUTILS_H
+#define ANDROID_SERVERS_CAMERA_STRINGUTILS_H
+
+#include <codecvt>
+#include <locale>
+#include <memory>
+#include <optional>
+#include <string>
+
+#include <fmt/printf.h>
+#include <utils/String8.h>
+#include <utils/String16.h>
+
+namespace android {
+    inline String8 toString8(const std::string &str) {
+        return String8(str.c_str());
+    }
+
+    inline String8 toString8(const String16 &str) {
+        return String8(str);
+    }
+
+    inline String8 toString8(const char *str) {
+        return String8(str);
+    }
+
+    inline String16 toString16(const std::string &str) {
+        return String16(str.c_str());
+    }
+
+    inline String16 toString16(const String8 &str) {
+        return String16(str);
+    }
+
+    inline String16 toString16(const char *str) {
+        return String16(str);
+    }
+
+    inline std::optional<String16> toString16(std::optional<std::string> str) {
+        if (str.has_value()) {
+            return std::optional<String16>(toString16(str.value()));
+        }
+
+        return std::nullopt;
+    }
+
+    inline std::string toStdString(const String8 &str) {
+        return std::string(str.string());
+    }
+
+    inline std::string toStdString(const String16 &str) {
+        std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
+        return convert.to_bytes(str.string());
+    }
+
+    /**
+     * Convert a non-null-terminated UTF16 string to a UTF8 string (i.e. in jni functions)
+     * len is the number of characters.
+     */
+    inline std::string toStdString(const char16_t *str, size_t len) {
+        std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
+        return convert.to_bytes(str, str + len);
+    }
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_STRINGUTILS_H
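For reference, a small usage sketch of the conversion helpers introduced in this new header, as they are applied throughout the rest of the patch (the function name and values are illustrative):

    #include <string>

    #include <camera/StringUtils.h>
    #include <utils/String16.h>
    #include <utils/String8.h>

    using namespace android;

    // Illustrative only: camera ids are kept as std::string internally and
    // converted only at the Parcel / legacy String16-String8 API boundary.
    static std::string roundTripCameraId(const std::string& cameraId) {
        String16 wire = toString16(cameraId);  // e.g. before Parcel::writeString16()
        String8 forLog = toString8(cameraId);  // e.g. for a legacy String8-based API
        (void) forLog;
        return toStdString(wire);              // e.g. after Parcel::readString16()
    }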
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 16fddb5..3f74b4a 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -17,6 +17,8 @@
 #ifndef ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H
 #define ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H
 
+#include <string>
+
 #include <gui/IGraphicBufferProducer.h>
 #include <binder/Parcelable.h>
 
@@ -63,7 +65,7 @@
     int32_t                    getColorSpace() const;
     bool                       isDeferred() const;
     bool                       isShared() const;
-    String16                   getPhysicalCameraId() const;
+    std::string                getPhysicalCameraId() const;
     bool                       isMultiResolution() const;
     int64_t                    getStreamUseCase() const;
     int                        getTimestampBase() const;
@@ -90,11 +92,11 @@
     OutputConfiguration(const android::Parcel& parcel);
 
     OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
-            const String16& physicalCameraId,
+            const std::string& physicalCameraId,
             int surfaceSetID = INVALID_SET_ID, bool isShared = false);
 
     OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
-                        int rotation, const String16& physicalCameraId,
+                        int rotation, const std::string& physicalCameraId,
                         int surfaceSetID = INVALID_SET_ID,
                         int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
                         int height = 0, bool isShared = false);
@@ -192,7 +194,7 @@
     int                        mHeight;
     bool                       mIsDeferred;
     bool                       mIsShared;
-    String16                   mPhysicalCameraId;
+    std::string                mPhysicalCameraId;
     bool                       mIsMultiResolution;
     std::vector<int32_t>       mSensorPixelModesUsed;
     int64_t                    mDynamicRangeProfile;
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 24a11e3..921aab2 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -177,6 +177,7 @@
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libhidlbase",
         "libmediandk",
         "libnativewindow",
         "libutils",
@@ -186,6 +187,7 @@
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
+        "android.hidl.token@1.0",
     ],
     cflags: [
         "-D__ANDROID_VNDK__",
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 1dae0f9..8bdb6d4 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -21,6 +21,7 @@
 #include <inttypes.h>
 #include <android/hardware/ICameraService.h>
 #include <gui/Surface.h>
+#include <camera/StringUtils.h>
 #include "ACameraDevice.h"
 #include "ACameraMetadata.h"
 #include "ACaptureRequest.h"
@@ -234,8 +235,7 @@
             return ret;
         }
 
-        String16 physicalId16(output.mPhysicalCameraId.c_str());
-        OutputConfiguration outConfig(iGBP, output.mRotation, physicalId16,
+        OutputConfiguration outConfig(iGBP, output.mRotation, output.mPhysicalCameraId,
                 OutputConfiguration::INVALID_SET_ID, true);
 
         for (auto& anw : output.mSharedWindows) {
@@ -299,8 +299,7 @@
         return ret;
     }
 
-    String16 physicalId16(output->mPhysicalCameraId.c_str());
-    OutputConfiguration outConfig(iGBP, output->mRotation, physicalId16,
+    OutputConfiguration outConfig(iGBP, output->mRotation, output->mPhysicalCameraId,
             OutputConfiguration::INVALID_SET_ID, true);
 
     for (auto& anw : output->mSharedWindows) {
@@ -683,9 +682,8 @@
         if (ret != ACAMERA_OK) {
             return ret;
         }
-        String16 physicalId16(outConfig.mPhysicalCameraId.c_str());
         outputSet.insert(std::make_pair(
-                anw, OutputConfiguration(iGBP, outConfig.mRotation, physicalId16,
+                anw, OutputConfiguration(iGBP, outConfig.mRotation, outConfig.mPhysicalCameraId,
                         OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
     }
     auto addSet = outputSet;
@@ -919,7 +917,7 @@
         msg->setObject(kSessionSpKey, session);
         if (cbh.mIsLogicalCameraCallback) {
             if (resultExtras.errorPhysicalCameraId.size() > 0) {
-                String8 cameraId(resultExtras.errorPhysicalCameraId);
+                String8 cameraId = toString8(resultExtras.errorPhysicalCameraId);
                 msg->setString(kFailingPhysicalCameraId, cameraId.string(), cameraId.size());
             }
             msg->setPointer(kCallbackFpKey, (void*) cbh.mOnLogicalCameraCaptureFailed);
@@ -1215,7 +1213,7 @@
                     std::vector<std::string> physicalCameraIds;
                     std::vector<sp<ACameraMetadata>> physicalMetadataCopy;
                     for (size_t i = 0; i < physicalResultInfo.size(); i++) {
-                        String8 physicalId8(physicalResultInfo[i].mPhysicalCameraId);
+                        String8 physicalId8 = toString8(physicalResultInfo[i].mPhysicalCameraId);
                         physicalCameraIds.push_back(physicalId8.c_str());
 
                         CameraMetadata clone = physicalResultInfo[i].mPhysicalCameraMetadata;
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 837b5be..299ffc0 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -25,6 +25,7 @@
 #include <cutils/properties.h>
 #include <stdlib.h>
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 #include <camera/VendorTagDescriptor.h>
 
 using namespace android::acam;
@@ -84,7 +85,7 @@
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
         do {
-            binder = sm->getService(String16(kCameraServiceName));
+            binder = sm->getService(toString16(kCameraServiceName));
             if (binder != nullptr) {
                 break;
             }
@@ -188,12 +189,12 @@
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         AutoMutex lock(cm->mLock);
-        std::vector<String8> cameraIdList;
+        std::vector<std::string> cameraIdList;
         for (auto& pair : cm->mDeviceStatusMap) {
             cameraIdList.push_back(pair.first);
         }
 
-        for (String8 cameraId : cameraIdList) {
+        for (const std::string& cameraId : cameraIdList) {
             cm->onStatusChangedLocked(
                     CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
         }
@@ -259,7 +260,7 @@
     // Send initial callbacks if callback is newly registered
     if (pair.second) {
         for (auto& pair : mDeviceStatusMap) {
-            const String8& cameraId = pair.first;
+            const std::string& cameraId = pair.first;
             int32_t status = pair.second.getStatus();
             // Don't send initial callbacks for camera ids which don't support
             // camera2
@@ -273,12 +274,12 @@
                     cb.mAvailable : cb.mUnavailable;
             msg->setPointer(kCallbackFpKey, (void *) cbFunc);
             msg->setPointer(kContextKey, cb.mContext);
-            msg->setString(kCameraIdKey, AString(cameraId));
+            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
             mPendingCallbackCnt++;
             msg->post();
 
             // Physical camera unavailable callback
-            std::set<String8> unavailablePhysicalCameras =
+            std::set<std::string> unavailablePhysicalCameras =
                     pair.second.getUnavailablePhysicalIds();
             for (const auto& physicalCameraId : unavailablePhysicalCameras) {
                 sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
@@ -286,8 +287,8 @@
                         cb.mPhysicalCamUnavailable;
                 msg->setPointer(kCallbackFpKey, (void *) cbFunc);
                 msg->setPointer(kContextKey, cb.mContext);
-                msg->setString(kCameraIdKey, AString(cameraId));
-                msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId));
+                msg->setString(kCameraIdKey, AString(cameraId.c_str()));
+                msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
                 mPendingCallbackCnt++;
                 msg->post();
             }
@@ -295,11 +296,11 @@
     }
 }
 
-bool CameraManagerGlobal::supportsCamera2ApiLocked(const String8 &cameraId) {
+bool CameraManagerGlobal::supportsCamera2ApiLocked(const std::string &cameraId) {
     bool camera2Support = false;
     auto cs = getCameraServiceLocked();
     binder::Status serviceRet =
-        cs->supportsCameraApi(String16(cameraId),
+        cs->supportsCameraApi(cameraId,
                 hardware::ICameraService::API_VERSION_2, &camera2Support);
     if (!serviceRet.isOk()) {
         ALOGE("%s: supportsCameraApi2Locked() call failed for cameraId  %s",
@@ -309,7 +310,7 @@
     return camera2Support;
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<String8>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     Mutex::Autolock _l(mLock);
     // Needed to make sure we're connected to cameraservice
@@ -459,10 +460,10 @@
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
-        int32_t status, const String16& cameraId) {
+        int32_t status, const std::string& cameraId) {
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, String8(cameraId));
+        cm->onStatusChanged(status, cameraId);
     } else {
         ALOGE("Cannot deliver status change. Global camera manager died");
     }
@@ -470,10 +471,10 @@
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
-        int32_t status, const String16& cameraId, const String16& physicalCameraId) {
+        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, String8(cameraId), String8(physicalCameraId));
+        cm->onStatusChanged(status, cameraId, physicalCameraId);
     } else {
         ALOGE("Cannot deliver physical camera status change. Global camera manager died");
     }
@@ -495,13 +496,13 @@
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const String8& cameraId) {
+        int32_t status, const std::string& cameraId) {
     Mutex::Autolock _l(mLock);
     onStatusChangedLocked(status, cameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const String8& cameraId) {
+        int32_t status, const std::string& cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -534,7 +535,7 @@
                     cb.mAvailable : cb.mUnavailable;
             msg->setPointer(kCallbackFpKey, (void *) cbFp);
             msg->setPointer(kContextKey, cb.mContext);
-            msg->setString(kCameraIdKey, AString(cameraId));
+            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
             mPendingCallbackCnt++;
             msg->post();
         }
@@ -545,13 +546,13 @@
 }
 
 void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const String8& cameraId, const String8& physicalCameraId) {
+        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
     onStatusChangedLocked(status, cameraId, physicalCameraId);
 }
 
 void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const String8& cameraId, const String8& physicalCameraId) {
+        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
@@ -567,7 +568,7 @@
     if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
             logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalCameraId.string(), status, logicalCamStatus);
+                __FUNCTION__, physicalCameraId.c_str(), status, logicalCamStatus);
         return;
     }
 
@@ -588,8 +589,8 @@
                     cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
             msg->setPointer(kCallbackFpKey, (void *) cbFp);
             msg->setPointer(kContextKey, cb.mContext);
-            msg->setString(kCameraIdKey, AString(cameraId));
-            msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId));
+            msg->setString(kCameraIdKey, AString(cameraId.c_str()));
+            msg->setString(kPhysicalCameraIdKey, AString(physicalCameraId.c_str()));
             mPendingCallbackCnt++;
             msg->post();
         }
@@ -607,20 +608,20 @@
 }
 
 bool CameraManagerGlobal::StatusAndHAL3Support::addUnavailablePhysicalId(
-        const String8& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto result = unavailablePhysicalIds.insert(physicalCameraId);
     return result.second;
 }
 
 bool CameraManagerGlobal::StatusAndHAL3Support::removeUnavailablePhysicalId(
-        const String8& physicalCameraId) {
+        const std::string& physicalCameraId) {
     std::lock_guard<std::mutex> lock(mLock);
     auto count = unavailablePhysicalIds.erase(physicalCameraId);
     return count > 0;
 }
 
-std::set<String8> CameraManagerGlobal::StatusAndHAL3Support::getUnavailablePhysicalIds() {
+std::set<std::string> CameraManagerGlobal::StatusAndHAL3Support::getUnavailablePhysicalIds() {
     std::lock_guard<std::mutex> lock(mLock);
     return unavailablePhysicalIds;
 }
@@ -635,7 +636,7 @@
 ACameraManager::getCameraIdList(ACameraIdList** cameraIdList) {
     Mutex::Autolock _l(mLock);
 
-    std::vector<String8> idList;
+    std::vector<std::string> idList;
     CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
 
     int numCameras = idList.size();
@@ -652,7 +653,7 @@
         return ACAMERA_ERROR_NOT_ENOUGH_MEMORY;
     }
     for (int i = 0; i < numCameras; i++) {
-        const char* src = idList[i].string();
+        const char* src = idList[i].c_str();
         size_t dstSize = strlen(src) + 1;
         char* dst = new char[dstSize];
         if (!dst) {
@@ -694,7 +695,7 @@
 
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
-    binder::Status serviceRet = cs->getCameraCharacteristics(String16(cameraIdStr),
+    binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
             targetSdkVersion, /*overrideToPortrait*/false, &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -745,7 +746,7 @@
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, String16(cameraId), String16(""), {},
+            callbacks, cameraId, "", {},
             hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
             targetSdkVersion, /*overrideToPortrait*/false, /*out*/&deviceRemote);
 
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index 0dd79da..c135d0f 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -62,7 +62,7 @@
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<String8> *cameraIds);
+    void getCameraIdList(std::vector<std::string> *cameraIds);
 
   private:
     sp<hardware::ICameraService> mCameraService;
@@ -87,23 +87,23 @@
     class CameraServiceListener final : public hardware::BnCameraServiceListener {
       public:
         explicit CameraServiceListener(CameraManagerGlobal* cm) : mCameraManager(cm) {}
-        virtual binder::Status onStatusChanged(int32_t status, const String16& cameraId);
+        virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId);
         virtual binder::Status onPhysicalCameraStatusChanged(int32_t status,
-                const String16& cameraId, const String16& physicalCameraId);
+                const std::string& cameraId, const std::string& physicalCameraId);
 
         // Torch API not implemented yet
-        virtual binder::Status onTorchStatusChanged(int32_t, const String16&) {
+        virtual binder::Status onTorchStatusChanged(int32_t, const std::string&) {
             return binder::Status::ok();
         }
-        virtual binder::Status onTorchStrengthLevelChanged(const String16&, int32_t) {
+        virtual binder::Status onTorchStrengthLevelChanged(const std::string&, int32_t) {
             return binder::Status::ok();
         }
 
         virtual binder::Status onCameraAccessPrioritiesChanged();
-        virtual binder::Status onCameraOpened(const String16&, const String16&) {
+        virtual binder::Status onCameraOpened(const std::string&, const std::string&) {
             return binder::Status::ok();
         }
-        virtual binder::Status onCameraClosed(const String16&) {
+        virtual binder::Status onCameraClosed(const std::string&) {
             return binder::Status::ok();
         }
 
@@ -203,20 +203,20 @@
 
     sp<hardware::ICameraService> getCameraServiceLocked();
     void onCameraAccessPrioritiesChanged();
-    void onStatusChanged(int32_t status, const String8& cameraId);
-    void onStatusChangedLocked(int32_t status, const String8& cameraId);
-    void onStatusChanged(int32_t status, const String8& cameraId, const String8& physicalCameraId);
-    void onStatusChangedLocked(int32_t status, const String8& cameraId,
-           const String8& physicalCameraId);
+    void onStatusChanged(int32_t status, const std::string& cameraId);
+    void onStatusChangedLocked(int32_t status, const std::string& cameraId);
+    void onStatusChanged(int32_t status, const std::string& cameraId, const std::string& physicalCameraId);
+    void onStatusChangedLocked(int32_t status, const std::string& cameraId,
+           const std::string& physicalCameraId);
     // Utils for status
     static bool validStatus(int32_t status);
     static bool isStatusAvailable(int32_t status);
-    bool supportsCamera2ApiLocked(const String8 &cameraId);
+    bool supportsCamera2ApiLocked(const std::string &cameraId);
 
     // The sort logic must match the logic in
     // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
     struct CameraIdComparator {
-        bool operator()(const String8& a, const String8& b) const {
+        bool operator()(const std::string& a, const std::string& b) const {
             uint32_t aUint = 0, bUint = 0;
             bool aIsUint = base::ParseUint(a.c_str(), &aUint);
             bool bIsUint = base::ParseUint(b.c_str(), &bUint);
@@ -238,22 +238,22 @@
       private:
         int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
         mutable std::mutex mLock;
-        std::set<String8> unavailablePhysicalIds;
+        std::set<std::string> unavailablePhysicalIds;
       public:
         const bool supportsHAL3 = false;
         StatusAndHAL3Support(int32_t st, bool HAL3support):
                 status(st), supportsHAL3(HAL3support) { };
         StatusAndHAL3Support() = default;
 
-        bool addUnavailablePhysicalId(const String8& physicalCameraId);
-        bool removeUnavailablePhysicalId(const String8& physicalCameraId);
+        bool addUnavailablePhysicalId(const std::string& physicalCameraId);
+        bool removeUnavailablePhysicalId(const std::string& physicalCameraId);
         int32_t getStatus();
         void updateStatus(int32_t newStatus);
-        std::set<String8> getUnavailablePhysicalIds();
+        std::set<std::string> getUnavailablePhysicalIds();
     };
 
     // Map camera_id -> status
-    std::map<String8, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
+    std::map<std::string, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7f6ea9d..74c6cad 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -31,10 +31,13 @@
 #include <stdio.h>
 
 #include <android/log.h>
+#include <android/hidl/manager/1.2/IServiceManager.h>
+#include <android/hidl/token/1.0/ITokenManager.h>
 #include <camera/NdkCameraError.h>
 #include <camera/NdkCameraManager.h>
 #include <camera/NdkCameraDevice.h>
 #include <camera/NdkCameraCaptureSession.h>
+#include <hidl/ServiceManagement.h>
 #include <media/NdkImage.h>
 #include <media/NdkImageReader.h>
 #include <cutils/native_handle.h>
@@ -50,6 +53,8 @@
 static constexpr int kTestImageFormat = AIMAGE_FORMAT_YUV_420_888;
 
 using android::hardware::camera::common::V1_0::helper::VendorTagDescriptorCache;
+using android::hidl::manager::V1_0::IServiceManager;
+using android::hidl::token::V1_0::ITokenManager;
 using ConfiguredWindows = std::set<const native_handle_t *>;
 
 class CameraHelper {
@@ -981,11 +986,19 @@
 
 
 TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     testBasicTakePictures(/*prepareSurfaces*/ false);
     testBasicTakePictures(/*prepareSurfaces*/ true);
 }
 
 TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
+    auto transport = android::hardware::defaultServiceManager()->getTransport(ITokenManager::descriptor, "default");
+    if (transport.isOk() && transport == IServiceManager::Transport::EMPTY) {
+        GTEST_SKIP() << "This device no longer supports AImageReader_getWindowNativeHandle";
+    }
     for (auto & v2 : {true, false}) {
         testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
         testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
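The two tests above perform the same token-manager transport probe before deciding whether to skip; if more tests grow the same guard, it could be factored into a small helper along these lines (a sketch, not part of this change):

    #include <android/hidl/manager/1.2/IServiceManager.h>
    #include <android/hidl/token/1.0/ITokenManager.h>
    #include <hidl/ServiceManagement.h>

    using android::hidl::manager::V1_0::IServiceManager;
    using android::hidl::token::V1_0::ITokenManager;

    // Illustrative helper: true when no token manager is registered, i.e. the
    // device no longer supports AImageReader_getWindowNativeHandle.
    static bool tokenManagerMissing() {
        auto transport = android::hardware::defaultServiceManager()->getTransport(
                ITokenManager::descriptor, "default");
        return transport.isOk() && transport == IServiceManager::Transport::EMPTY;
    }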
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 1af5637..bb963ab 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -43,6 +43,7 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <camera/StringUtils.h>
 
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -68,15 +69,15 @@
 
 // Stub listener implementation
 class TestCameraServiceListener : public hardware::BnCameraServiceListener {
-    std::map<String16, int32_t> mCameraTorchStatuses;
-    std::map<String16, int32_t> mCameraStatuses;
+    std::map<std::string, int32_t> mCameraTorchStatuses;
+    std::map<std::string, int32_t> mCameraStatuses;
     mutable Mutex mLock;
     mutable Condition mCondition;
     mutable Condition mTorchCondition;
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t status, const String16& cameraId) {
+    virtual binder::Status onStatusChanged(int32_t status, const std::string& cameraId) override {
         Mutex::Autolock l(mLock);
         mCameraStatuses[cameraId] = status;
         mCondition.broadcast();
@@ -84,36 +85,37 @@
     };
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) override {
         // No op
         return binder::Status::ok();
     };
 
-    virtual binder::Status onTorchStatusChanged(int32_t status, const String16& cameraId) {
+    virtual binder::Status onTorchStatusChanged(int32_t status,
+            const std::string& cameraId) override {
         Mutex::Autolock l(mLock);
         mCameraTorchStatuses[cameraId] = status;
         mTorchCondition.broadcast();
         return binder::Status::ok();
     };
 
-    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
-            int32_t /*torchStrength*/) {
+    virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
+            int32_t /*torchStrength*/) override {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraAccessPrioritiesChanged() {
+    virtual binder::Status onCameraAccessPrioritiesChanged() override {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
-            const String16& /*clientPackageName*/) {
+    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageName*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
         // No op
         return binder::Status::ok();
     }
@@ -136,7 +138,7 @@
     bool waitForTorchState(int32_t status, int32_t cameraId) const {
         Mutex::Autolock l(mLock);
 
-        const auto& iter = mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+        const auto& iter = mCameraTorchStatuses.find(std::to_string(cameraId));
         if (iter != mCameraTorchStatuses.end() && iter->second == status) {
             return true;
         }
@@ -147,7 +149,7 @@
                 return false;
             }
             const auto& iter =
-                    mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+                    mCameraTorchStatuses.find(std::to_string(cameraId));
             foundStatus = (iter != mCameraTorchStatuses.end() && iter->second == status);
         }
         return true;
@@ -155,14 +157,14 @@
 
     int32_t getTorchStatus(int32_t cameraId) const {
         Mutex::Autolock l(mLock);
-        const auto& iter = mCameraTorchStatuses.find(String16(String8::format("%d", cameraId)));
+        const auto& iter = mCameraTorchStatuses.find(std::to_string(cameraId));
         if (iter == mCameraTorchStatuses.end()) {
             return hardware::ICameraServiceListener::TORCH_STATUS_UNKNOWN;
         }
         return iter->second;
     };
 
-    int32_t getStatus(const String16& cameraId) const {
+    int32_t getStatus(const std::string& cameraId) const {
         Mutex::Autolock l(mLock);
         const auto& iter = mCameraStatuses.find(cameraId);
         if (iter == mCameraStatuses.end()) {
@@ -352,11 +354,11 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, String16(it.cameraId));
+        listener->onStatusChanged(it.status, it.cameraId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
-        String16 cameraId = String16(String8::format("%d", i));
+        std::string cameraId = std::to_string(i);
         bool isSupported = false;
         res = service->supportsCameraApi(cameraId,
                 hardware::ICameraService::API_VERSION_2, &isSupported);
@@ -384,7 +386,7 @@
         // Check connect binder calls
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        res = service->connectDevice(callbacks, cameraId, String16("meeeeeeeee!"),
+        res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
                 {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 /*overrideToPortrait*/false, /*out*/&device);
@@ -423,12 +425,12 @@
     sp<TestCameraServiceListener> serviceListener;
 
     std::pair<sp<TestCameraDeviceCallbacks>, sp<hardware::camera2::ICameraDeviceUser>>
-            openNewDevice(const String16& deviceId) {
+            openNewDevice(const std::string& deviceId) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
         {
             SCOPED_TRACE("openNewDevice");
-            binder::Status res = service->connectDevice(callbacks, deviceId, String16("meeeeeeeee!"),
+            binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
                     {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                     /*overrideToPortrait*/false, /*out*/&device);
@@ -464,7 +466,7 @@
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, String16(it.cameraId));
+            serviceListener->onStatusChanged(it.status, it.cameraId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
                 &numCameras);
@@ -484,9 +486,8 @@
     ASSERT_NOT_NULL(service);
     EXPECT_TRUE(serviceListener->waitForNumCameras(numCameras));
     for (int32_t i = 0; i < numCameras; i++) {
-        String8 cameraId8 = String8::format("%d", i);
+        std::string cameraId = std::to_string(i);
         // Make sure we're available, or skip device tests otherwise
-        String16 cameraId(cameraId8);
         int32_t s = serviceListener->getStatus(cameraId);
         EXPECT_EQ(hardware::ICameraServiceListener::STATUS_PRESENT, s);
         if (s != hardware::ICameraServiceListener::STATUS_PRESENT) {
@@ -513,7 +514,7 @@
 
         sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
 
-        String16 noPhysicalId;
+        std::string noPhysicalId;
         OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
 
         // Can we configure?
@@ -550,7 +551,7 @@
         EXPECT_TRUE(res.isOk()) << res;
 
         hardware::camera2::CaptureRequest request;
-        request.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
+        request.mPhysicalCameraSettings.push_back({cameraId, requestTemplate});
         request.mSurfaceList.add(surface);
         request.mIsReprocess = false;
         int64_t lastFrameNumber = 0;
@@ -577,7 +578,7 @@
                 /*out*/&requestTemplate);
         EXPECT_TRUE(res.isOk()) << res;
         hardware::camera2::CaptureRequest request2;
-        request2.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
+        request2.mPhysicalCameraSettings.push_back({cameraId, requestTemplate});
         request2.mSurfaceList.add(surface);
         request2.mIsReprocess = false;
         callbacks->clearStatus();
@@ -610,10 +611,10 @@
         EXPECT_TRUE(res.isOk()) << res;
         android::hardware::camera2::CaptureRequest request3;
         android::hardware::camera2::CaptureRequest request4;
-        request3.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
+        request3.mPhysicalCameraSettings.push_back({cameraId, requestTemplate});
         request3.mSurfaceList.add(surface);
         request3.mIsReprocess = false;
-        request4.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate2});
+        request4.mPhysicalCameraSettings.push_back({cameraId, requestTemplate2});
         request4.mSurfaceList.add(surface);
         request4.mIsReprocess = false;
         std::vector<hardware::camera2::CaptureRequest> requestList;
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index f2fa48c..1de7cb4 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -62,7 +62,7 @@
 TEST_F(CameraCharacteristicsPermission, TestCameraPermission) {
     for (int32_t cameraId = 0; cameraId < numCameras; cameraId++) {
 
-        String16 cameraIdStr = String16(String8::format("%d", cameraId));
+        std::string cameraIdStr = std::to_string(cameraId);
         bool isSupported = false;
         auto rc = mCameraService->supportsCameraApi(cameraIdStr,
                 hardware::ICameraService::API_VERSION_2, &isSupported);
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 6423709..3ae7659 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -27,6 +27,7 @@
 #include <camera/CameraParameters.h>
 #include <camera/CameraMetadata.h>
 #include <camera/Camera.h>
+#include <camera/StringUtils.h>
 #include <android/hardware/ICameraService.h>
 
 using namespace android;
@@ -169,7 +170,7 @@
         sp<SurfaceControl> surfaceControl;
         sp<ICamera> cameraDevice;
 
-        String16 cameraIdStr = String16(String8::format("%d", cameraId));
+        std::string cameraIdStr = std::to_string(cameraId);
         bool isSupported = false;
         rc = mCameraService->supportsCameraApi(cameraIdStr,
                 hardware::ICameraService::API_VERSION_1, &isSupported);
@@ -208,7 +209,7 @@
         }
 
         rc = mCameraService->connect(this, cameraId,
-                String16("ZSLTest"), hardware::ICameraService::USE_CALLING_UID,
+                "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
                 hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
diff --git a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
index 45b3526..07efc20 100644
--- a/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_Parameters_fuzzer.cpp
@@ -19,6 +19,7 @@
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <utils/String16.h>
+#include <camera/StringUtils.h>
 
 using namespace std;
 using namespace android;
@@ -77,7 +78,7 @@
         } else {
             params = mFDP->ConsumeRandomLengthString();
         }
-        *obj = new type(String8(params.c_str()));
+        *obj = new type(toString8(params));
     }
 }
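
For context, toString8() here (and toString16() in the fuzzers below) comes from the camera/StringUtils.h header this change includes; both accept a std::string directly, which is why the String8(params.c_str()) temporary goes away. A minimal usage sketch, assuming the helpers sit in the android namespace as their unqualified use in these files suggests:

#include <camera/StringUtils.h>
#include <string>
#include <utils/String16.h>
#include <utils/String8.h>

void conversionExample(const std::string& params) {
    // std::string -> legacy string types without an explicit c_str() round trip.
    android::String8 legacy8 = android::toString8(params);
    android::String16 legacy16 = android::toString16(params);
    (void)legacy8;
    (void)legacy16;
}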
 
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
index 2f2ad77..c9bb20c 100644
--- a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -17,6 +17,7 @@
 #include <CameraSessionStats.h>
 #include <binder/Parcel.h>
 #include <fuzzer/FuzzedDataProvider.h>
+#include <camera/StringUtils.h>
 #include "camera2common.h"
 
 using namespace std;
@@ -100,10 +101,9 @@
     if (fdp.ConsumeBool()) {
         cameraSessionStats = new CameraSessionStats();
     } else {
-        string camId = fdp.ConsumeRandomLengthString();
-        String16 cameraId(camId.c_str());
+        string cameraId = fdp.ConsumeRandomLengthString();
         if (fdp.ConsumeBool()) {
-            parcelCamSessionStats.writeString16(cameraId);
+            parcelCamSessionStats.writeString16(toString16(cameraId));
         }
         int32_t facing = fdp.ConsumeIntegral<int32_t>();
         if (fdp.ConsumeBool()) {
@@ -113,10 +113,9 @@
         if (fdp.ConsumeBool()) {
             parcelCamSessionStats.writeInt32(newCameraState);
         }
-        string name = fdp.ConsumeRandomLengthString();
-        String16 clientName(name.c_str());
+        string clientName = fdp.ConsumeRandomLengthString();
         if (fdp.ConsumeBool()) {
-            parcelCamSessionStats.writeString16(clientName);
+            parcelCamSessionStats.writeString16(toString16(clientName));
         }
         int32_t apiLevel = fdp.ConsumeIntegral<int32_t>();
         if (fdp.ConsumeBool()) {
diff --git a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
index 06215a5..494ec1b 100644
--- a/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2CaptureRequest_fuzzer.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <CameraMetadata.h>
+#include <camera/StringUtils.h>
 #include <camera2/CaptureRequest.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -45,7 +46,7 @@
     for (size_t idx = 0; idx < physicalCameraSettingsSize; ++idx) {
         string id = fdp.ConsumeRandomLengthString();
         if (fdp.ConsumeBool()) {
-            parcelCamCaptureReq.writeString16(String16(id.c_str()));
+            parcelCamCaptureReq.writeString16(toString16(id));
         }
         CameraMetadata cameraMetadata;
         if (fdp.ConsumeBool()) {
diff --git a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
index 51ac4e8..2fe9a94 100644
--- a/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2OutputConfiguration_fuzzer.cpp
@@ -38,8 +38,7 @@
         outputConfiguration = new OutputConfiguration();
     } else {
         int32_t rotation = fdp.ConsumeIntegral<int32_t>();
-        string phyCameraId = fdp.ConsumeRandomLengthString();
-        String16 physicalCameraId(phyCameraId.c_str());
+        string physicalCameraId = fdp.ConsumeRandomLengthString();
         int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
         bool isShared = fdp.ConsumeBool();
 
diff --git a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
index b2de95d..7cd0e59 100644
--- a/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2SessionConfiguration_fuzzer.cpp
@@ -65,8 +65,7 @@
             surface.clear();
         }
         int32_t rotation = fdp.ConsumeIntegral<int32_t>();
-        string phyCameraId = fdp.ConsumeRandomLengthString();
-        String16 physicalCameraId(phyCameraId.c_str());
+        string physicalCameraId = fdp.ConsumeRandomLengthString();
         int32_t surfaceSetID = fdp.ConsumeIntegral<int32_t>();
         bool isShared = fdp.ConsumeBool();
         outputConfiguration =
diff --git a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
index 1396431..dd857d4 100644
--- a/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_captureResult_fuzzer.cpp
@@ -32,8 +32,7 @@
     if (fdp.ConsumeBool()) {
         physicalCaptureResultInfo = new PhysicalCaptureResultInfo();
     } else {
-        string camId = fdp.ConsumeRandomLengthString();
-        String16 cameraId(camId.c_str());
+        string cameraId = fdp.ConsumeRandomLengthString();
         CameraMetadata cameraMetadata = CameraMetadata();
         physicalCaptureResultInfo = new PhysicalCaptureResultInfo(cameraId, cameraMetadata);
     }
@@ -47,9 +46,7 @@
     }
     if (fdp.ConsumeBool()) {
         captureResult->mResultExtras = CaptureResultExtras();
-        string errCamId = fdp.ConsumeRandomLengthString();
-        String16 errCameraId(errCamId.c_str());
-        captureResult->mResultExtras.errorPhysicalCameraId = errCameraId;
+        captureResult->mResultExtras.errorPhysicalCameraId = fdp.ConsumeRandomLengthString();
         captureResult->mResultExtras.isValid();
         invokeReadWriteNullParcel<CaptureResultExtras>(&(captureResult->mResultExtras));
     }
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index f9ef98e..d09a6dd 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -149,7 +149,7 @@
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
     mCameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
-                            String16("CAMERAFUZZ"), hardware::ICameraService::USE_CALLING_UID,
+                            "CAMERAFUZZ", hardware::ICameraService::USE_CALLING_UID,
                             hardware::ICameraService::USE_CALLING_PID,
                             /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                             /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, &cameraDevice);
@@ -294,18 +294,15 @@
         cameraStatus = new CameraStatus();
     } else {
         string cid = mFDP->ConsumeRandomLengthString();
-        String8 id(cid.c_str());
         int32_t status = mFDP->ConsumeIntegral<int32_t>();
         size_t unavailSubIdsSize = mFDP->ConsumeIntegralInRange<size_t>(kSizeMin, kSizeMax);
-        vector<String8> unavailSubIds;
+        vector<std::string> unavailSubIds;
         for (size_t idx = 0; idx < unavailSubIdsSize; ++idx) {
             string subId = mFDP->ConsumeRandomLengthString();
-            String8 unavailSubId(subId.c_str());
-            unavailSubIds.push_back(unavailSubId);
+            unavailSubIds.push_back(subId);
         }
-        string clientPkg = mFDP->ConsumeRandomLengthString();
-        String8 clientPackage(clientPkg.c_str());
-        cameraStatus = new CameraStatus(id, status, unavailSubIds, clientPackage);
+        string clientPackage = mFDP->ConsumeRandomLengthString();
+        cameraStatus = new CameraStatus(cid, status, unavailSubIds, clientPackage);
     }
 
     invokeReadWriteParcel<CameraStatus>(cameraStatus);
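
invokeReadWriteParcel<CameraStatus>() presumably round-trips the object through a Parcel; below is a hedged sketch of what that looks like with the new std::string-based constructor. The header path and the Parcelable method signatures are assumptions based on convention, not taken from this patch:

#include <binder/Parcel.h>
#include <camera/CameraBase.h>  // assumed to declare android::hardware::CameraStatus

using android::hardware::CameraStatus;

void cameraStatusRoundTrip() {
    // Fixed values for illustration; the fuzzer feeds arbitrary strings instead.
    CameraStatus in("0", /*status*/ 0, /*unavailSubIds*/ {"2", "3"},
                    /*clientPackage*/ "com.example.client");
    android::Parcel parcel;
    in.writeToParcel(&parcel);    // assumed Parcelable::writeToParcel signature
    parcel.setDataPosition(0);
    CameraStatus out;
    out.readFromParcel(&parcel);  // assumed Parcelable::readFromParcel signature
}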
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 17d7046..a19ef8e 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -90,9 +90,12 @@
 
 status_t Overlay::stop() {
     ALOGV("Overlay::stop");
-    Mutex::Autolock _l(mMutex);
-    mState = STOPPING;
-    mEventCond.signal();
+    {
+        Mutex::Autolock _l(mMutex);
+        mState = STOPPING;
+        mEventCond.signal();
+    }
+    join();
     return NO_ERROR;
 }
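
The reworked stop() above signals the state change inside a scoped lock and only then joins, so the overlay thread can re-acquire the mutex while it shuts down without deadlocking against stop(). The same pattern in a self-contained form, using the standard library rather than the utils Thread/Mutex classes this file uses (sketch only):

#include <condition_variable>
#include <mutex>
#include <thread>

class Worker {
public:
    void start() { mThread = std::thread([this] { run(); }); }

    void stop() {
        {
            // Flip the flag and wake the thread while holding the lock...
            std::lock_guard<std::mutex> lock(mMutex);
            mStopping = true;
            mCond.notify_one();
        }
        // ...then join outside the critical section, so run() can take the
        // lock again while finishing up.
        if (mThread.joinable()) mThread.join();
    }

private:
    void run() {
        std::unique_lock<std::mutex> lock(mMutex);
        mCond.wait(lock, [this] { return mStopping; });
        // Teardown work under the lock would deadlock if stop() still held mMutex.
    }

    std::thread mThread;
    std::mutex mMutex;
    std::condition_variable mCond;
    bool mStopping = false;
};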
 
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index 828d861..2030dc7 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -25,21 +25,31 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "packagemanager_aidl-cpp",
+    defaults: [
+        "libaaudioservice_dependencies",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+        "latest_android_hardware_audio_core_sounddose_ndk_shared",
+    ],
+
+    static_libs: [
         "libaaudioservice",
-        "libaudioclient",
         "libaudioflinger",
         "libaudiopolicyservice",
+        "libmedialogservice",
+        "libnbaio",
+    ],
+
+    shared_libs: [
+        "libaudioclient",
         "libaudioprocessing",
         "libbinder",
         "libcutils",
         "libhidlbase",
         "liblog",
         "libmedia",
-        "libmedialogservice",
         "libmediautils",
-        "libnbaio",
         "libnblog",
         "libpowermanager",
         "libutils",
@@ -59,9 +69,9 @@
         "frameworks/av/services/audiopolicy/engine/interface",
         "frameworks/av/services/audiopolicy/service",
         "frameworks/av/services/medialog",
+        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
 
-        // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-        "frameworks/av/services/oboeservice",
+
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index d865ab2..721a12a 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -69,7 +69,7 @@
         addParameter(
                 DefineParam(mChannelCount, C2_PARAMKEY_CHANNEL_COUNT)
                 .withDefault(new C2StreamChannelCountInfo::input(0u, 1))
-                .withFields({C2F(mChannelCount, value).inRange(1, 6)})
+                .withFields({C2F(mChannelCount, value).inRange(1, kMaxChannelCount)})
                 .withSetter(Setter<decltype(*mChannelCount)>::StrictValueWithNoDeps)
                 .build());
 
@@ -198,10 +198,17 @@
 }
 
 c2_status_t C2SoftAacEnc::onFlush_sm() {
+    if (mAACEncoder != nullptr) {
+        /* encoder's internal input buffer needs to be reset during flush */
+        if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CONTROL_STATE, AACENC_INIT_ALL)) {
+            ALOGE("Failed to reset AAC encoder");
+        }
+    }
     mSentCodecSpecificData = false;
     mInputSize = 0u;
     mNextFrameTimestampUs.reset();
     mLastFrameEndTimestampUs.reset();
+    mRemainderLen = 0;
     return C2_OK;
 }
 
@@ -562,6 +569,11 @@
                 inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
                 inargs.numInSamples -= outargs.numInSamples;
             }
+        } else {
+            // In case of error in encode call, discard remaining input bytes.
+            inBuffer[0] = nullptr;
+            inBufferSize[0] = 0;
+            inargs.numInSamples = 0;
         }
         ALOGV("encoderErr = %d mInputSize = %zu "
               "inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
@@ -597,10 +609,19 @@
                            &outBufDesc,
                            &inargs,
                            &outargs);
+
+        // after flush, discard remaining input bytes.
+        inBuffer[0] = nullptr;
         inBufferSize[0] = 0;
     }
 
     if (inBufferSize[0] > 0) {
+        if (inBufferSize[0] > kRemainderBufSize) {
+            ALOGE("Remaining bytes %d greater than remainder buffer size %zu", inBufferSize[0],
+                    kRemainderBufSize);
+            work->result = C2_CORRUPTED;
+            return;
+        }
         for (size_t i = 0; i < inBufferSize[0]; ++i) {
             mRemainder[i] = static_cast<uint8_t *>(inBuffer[0])[i];
         }
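
The new guard above keeps the per-byte copy from running past the fixed-size mRemainder buffer when the encoder leaves more input behind than expected. The same defensive idiom in isolation (a sketch, not the component's code):

#include <cstddef>
#include <cstdint>
#include <cstring>

constexpr size_t kBufSize = 12;  // e.g. 6 channels * sizeof(int16_t)

// Copy at most kBufSize leftover bytes into a fixed buffer; report failure
// instead of overflowing when the remainder is unexpectedly large.
bool stashRemainder(uint8_t (&buf)[kBufSize], size_t& bufLen,
                    const uint8_t* src, size_t srcLen) {
    if (srcLen > kBufSize) {
        return false;  // caller treats this as a corrupted work item
    }
    std::memcpy(buf, src, srcLen);
    bufLen = srcLen;
    return true;
}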
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index 9a28280..c79526c 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -47,6 +47,9 @@
             const std::shared_ptr<C2BlockPool> &pool) override;
 
 private:
+    static constexpr size_t kMaxChannelCount = 6;
+    static constexpr size_t kRemainderBufSize = kMaxChannelCount * sizeof(int16_t);
+
     std::shared_ptr<IntfImpl> mIntf;
 
     HANDLE_AACENCODER mAACEncoder;
@@ -63,7 +66,7 @@
     std::atomic_uint64_t mOutIndex;
 
     // We support max 6 channels
-    uint8_t mRemainder[6 * sizeof(int16_t)];
+    uint8_t mRemainder[kRemainderBufSize];
     size_t mRemainderLen;
 
     status_t initEncoder();
diff --git a/media/codec2/components/dav1d/Android.bp b/media/codec2/components/dav1d/Android.bp
new file mode 100644
index 0000000..f7850ad
--- /dev/null
+++ b/media/codec2/components/dav1d/Android.bp
@@ -0,0 +1,37 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_library {
+    name: "libcodec2_soft_av1dec_dav1d",
+    // TODO: b/277797541 - enable once ready
+    enabled: false,
+
+    defaults: [
+        "libcodec2_soft-defaults",
+        "libcodec2_soft_sanitize_all-defaults",
+    ],
+
+    cflags: [
+        "-DCODECNAME=\"c2.android.dav1d-av1.decoder\"",
+        "-Wno-unused-variable",
+    ],
+
+    srcs: ["C2SoftDav1dDec.cpp"],
+    static_libs: [
+        "libyuv_static",
+        "libdav1d_8bit",
+        "libdav1d_16bit",
+    ],
+
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media.swcodec",
+    ],
+
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
new file mode 100644
index 0000000..b0cef41
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -0,0 +1,1493 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftDav1dDec"
+#include <android-base/properties.h>
+#include <cutils/properties.h>
+#include <thread>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+#include <libyuv.h>
+#include <log/log.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include "C2SoftDav1dDec.h"
+
+// libyuv version required for I410ToAB30Matrix and I210ToAB30Matrix.
+#if LIBYUV_VERSION >= 1780
+#include <algorithm>
+#define HAVE_LIBYUV_I410_I210_TO_AB30 1
+#else
+#define HAVE_LIBYUV_I410_I210_TO_AB30 0
+#endif
+
+namespace android {
+
+// Flag to enable dumping the bitstream and the decoded pictures to files.
+static const bool ENABLE_DUMPING_FILES_DEFAULT = false;
+static const char ENABLE_DUMPING_FILES_PROPERTY[] = "debug.dav1d.enabledumping";
+
+// The number of frames to dump to a file
+static const int NUM_FRAMES_TO_DUMP_DEFAULT = INT_MAX;
+static const char NUM_FRAMES_TO_DUMP_PROPERTY[] = "debug.dav1d.numframestodump";
+
+// The number of threads used for the dav1d decoder.
+static const int NUM_THREADS_DAV1D_DEFAULT = 0;
+static const char NUM_THREADS_DAV1D_PROPERTY[] = "debug.dav1d.numthreads";
+
+// codecname set and passed in as a compile flag from Android.bp
+constexpr char COMPONENT_NAME[] = CODECNAME;
+
+constexpr size_t kMinInputBufferSize = 2 * 1024 * 1024;
+
+class C2SoftDav1dDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+  public:
+    explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper>& helper)
+        : SimpleInterface<void>::BaseParams(helper, COMPONENT_NAME, C2Component::KIND_DECODER,
+                                            C2Component::DOMAIN_VIDEO, MEDIA_MIMETYPE_VIDEO_AV1) {
+        noPrivateBuffers();
+        noInputReferences();
+        noOutputReferences();
+        noInputLatency();
+        noTimeStretch();
+
+        addParameter(DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+                             .withConstValue(new C2ComponentAttributesSetting(
+                                     C2Component::ATTRIB_IS_TEMPORAL))
+                             .build());
+
+        addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+                             .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 4096),
+                                     C2F(mSize, height).inRange(2, 4096),
+                             })
+                             .withSetter(SizeSetter)
+                             .build());
+
+        addParameter(DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+                             .withDefault(new C2StreamProfileLevelInfo::input(
+                                     0u, C2Config::PROFILE_AV1_0, C2Config::LEVEL_AV1_2_1))
+                             .withFields({C2F(mProfileLevel, profile)
+                                                  .oneOf({C2Config::PROFILE_AV1_0,
+                                                          C2Config::PROFILE_AV1_1}),
+                                          C2F(mProfileLevel, level)
+                                                  .oneOf({
+                                                          C2Config::LEVEL_AV1_2,
+                                                          C2Config::LEVEL_AV1_2_1,
+                                                          C2Config::LEVEL_AV1_2_2,
+                                                          C2Config::LEVEL_AV1_2_3,
+                                                          C2Config::LEVEL_AV1_3,
+                                                          C2Config::LEVEL_AV1_3_1,
+                                                          C2Config::LEVEL_AV1_3_2,
+                                                          C2Config::LEVEL_AV1_3_3,
+                                                          C2Config::LEVEL_AV1_4,
+                                                          C2Config::LEVEL_AV1_4_1,
+                                                          C2Config::LEVEL_AV1_4_2,
+                                                          C2Config::LEVEL_AV1_4_3,
+                                                          C2Config::LEVEL_AV1_5,
+                                                          C2Config::LEVEL_AV1_5_1,
+                                                          C2Config::LEVEL_AV1_5_2,
+                                                          C2Config::LEVEL_AV1_5_3,
+                                                  })})
+                             .withSetter(ProfileLevelSetter, mSize)
+                             .build());
+
+        mHdr10PlusInfoInput = C2StreamHdr10PlusInfo::input::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoInput, C2_PARAMKEY_INPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoInput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoInput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoInputSetter)
+                             .build());
+
+        mHdr10PlusInfoOutput = C2StreamHdr10PlusInfo::output::AllocShared(0);
+        addParameter(DefineParam(mHdr10PlusInfoOutput, C2_PARAMKEY_OUTPUT_HDR10_PLUS_INFO)
+                             .withDefault(mHdr10PlusInfoOutput)
+                             .withFields({
+                                     C2F(mHdr10PlusInfoOutput, m.value).any(),
+                             })
+                             .withSetter(Hdr10PlusInfoOutputSetter)
+                             .build());
+
+        // default static info
+        C2HdrStaticMetadataStruct defaultStaticInfo{};
+        helper->addStructDescriptors<C2MasteringDisplayColorVolumeStruct, C2ColorXyStruct>();
+        addParameter(
+                DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO)
+                        .withDefault(new C2StreamHdrStaticInfo::output(0u, defaultStaticInfo))
+                        .withFields({C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.red.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.green.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.blue.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.x).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.white.y).inRange(0, 1),
+                                     C2F(mHdrStaticInfo, mastering.maxLuminance).inRange(0, 65535),
+                                     C2F(mHdrStaticInfo, mastering.minLuminance).inRange(0, 6.5535),
+                                     C2F(mHdrStaticInfo, maxCll).inRange(0, 0XFFFF),
+                                     C2F(mHdrStaticInfo, maxFall).inRange(0, 0XFFFF)})
+                        .withSetter(HdrStaticInfoSetter)
+                        .build());
+
+        addParameter(DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+                             .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+                             .withFields({
+                                     C2F(mSize, width).inRange(2, 2048, 2),
+                                     C2F(mSize, height).inRange(2, 2048, 2),
+                             })
+                             .withSetter(MaxPictureSizeSetter, mSize)
+                             .build());
+
+        addParameter(
+                DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+                        .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, kMinInputBufferSize))
+                        .withFields({
+                                C2F(mMaxInputSize, value).any(),
+                        })
+                        .calculatedAs(MaxInputSizeSetter, mMaxSize)
+                        .build());
+
+        C2ChromaOffsetStruct locations[1] = {C2ChromaOffsetStruct::ITU_YUV_420_0()};
+        std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+                C2StreamColorInfo::output::AllocShared(1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+        defaultColorInfo = C2StreamColorInfo::output::AllocShared(
+                {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+        helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+        addParameter(DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+                             .withConstValue(defaultColorInfo)
+                             .build());
+
+        addParameter(DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsTuning::output(
+                                     0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mDefaultColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mDefaultColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mDefaultColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mDefaultColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(DefaultColorAspectsSetter)
+                             .build());
+
+        addParameter(DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+                             .withDefault(new C2StreamColorAspectsInfo::input(
+                                     0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+                                     C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                             .withFields({C2F(mCodedColorAspects, range)
+                                                  .inRange(C2Color::RANGE_UNSPECIFIED,
+                                                           C2Color::RANGE_OTHER),
+                                          C2F(mCodedColorAspects, primaries)
+                                                  .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                           C2Color::PRIMARIES_OTHER),
+                                          C2F(mCodedColorAspects, transfer)
+                                                  .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                           C2Color::TRANSFER_OTHER),
+                                          C2F(mCodedColorAspects, matrix)
+                                                  .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                           C2Color::MATRIX_OTHER)})
+                             .withSetter(CodedColorAspectsSetter)
+                             .build());
+
+        addParameter(
+                DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+                        .withDefault(new C2StreamColorAspectsInfo::output(
+                                0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+                                C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+                        .withFields(
+                                {C2F(mColorAspects, range)
+                                         .inRange(C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+                                 C2F(mColorAspects, primaries)
+                                         .inRange(C2Color::PRIMARIES_UNSPECIFIED,
+                                                  C2Color::PRIMARIES_OTHER),
+                                 C2F(mColorAspects, transfer)
+                                         .inRange(C2Color::TRANSFER_UNSPECIFIED,
+                                                  C2Color::TRANSFER_OTHER),
+                                 C2F(mColorAspects, matrix)
+                                         .inRange(C2Color::MATRIX_UNSPECIFIED,
+                                                  C2Color::MATRIX_OTHER)})
+                        .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+                        .build());
+
+        std::vector<uint32_t> pixelFormats = {HAL_PIXEL_FORMAT_YCBCR_420_888};
+        if (isHalPixelFormatSupported((AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010)) {
+            pixelFormats.push_back(HAL_PIXEL_FORMAT_YCBCR_P010);
+        }
+        // If the surface color format isn't added to the supported formats, there is no way to
+        // know when the color format is configured to surface. This is necessary to be able to
+        // choose a 10-bit format while decoding 10-bit clips in surface mode.
+        pixelFormats.push_back(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        // TODO: support more formats?
+        addParameter(DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+                             .withDefault(new C2StreamPixelFormatInfo::output(
+                                     0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+                             .withFields({C2F(mPixelFormat, value).oneOf(pixelFormats)})
+                             .withSetter((Setter<decltype(*mPixelFormat)>::StrictValueWithNoDeps))
+                             .build());
+    }
+
+    static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output>& oldMe,
+                          C2P<C2StreamPictureSizeInfo::output>& me) {
+        (void)mayBlock;
+        C2R res = C2R::Ok();
+        if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+            me.set().width = oldMe.v.width;
+        }
+        if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+            res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+            me.set().height = oldMe.v.height;
+        }
+        return res;
+    }
+
+    static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output>& me,
+                                    const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        // TODO: get max width/height from the size's field helpers vs.
+        // hardcoding
+        me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u);
+        me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u);
+        return C2R::Ok();
+    }
+
+    static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input>& me,
+                                  const C2P<C2StreamMaxPictureSizeTuning::output>& maxSize) {
+        (void)mayBlock;
+        // assume compression ratio of 2, but enforce a floor
+        me.set().value =
+                c2_max((((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072),
+                       kMinInputBufferSize);
+        return C2R::Ok();
+    }
+
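To make the sizing above concrete (worked numbers only, not part of the patch; kMinInputBufferSize is the 2 MiB constant defined near the top of this file):

    4096 x 4096: ((4096 + 63) / 64) * ((4096 + 63) / 64) * 3072 = 64 * 64 * 3072 = 12,582,912 bytes (about 12 MiB), so the formula dominates.
    320 x 240 (the default size): 5 * 4 * 3072 = 61,440 bytes, so the 2 MiB floor applies.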
+    static C2R DefaultColorAspectsSetter(bool mayBlock,
+                                         C2P<C2StreamColorAspectsTuning::output>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input>& me) {
+        (void)mayBlock;
+        if (me.v.range > C2Color::RANGE_OTHER) {
+            me.set().range = C2Color::RANGE_OTHER;
+        }
+        if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+            me.set().primaries = C2Color::PRIMARIES_OTHER;
+        }
+        if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+            me.set().transfer = C2Color::TRANSFER_OTHER;
+        }
+        if (me.v.matrix > C2Color::MATRIX_OTHER) {
+            me.set().matrix = C2Color::MATRIX_OTHER;
+        }
+        return C2R::Ok();
+    }
+
+    static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output>& me,
+                                  const C2P<C2StreamColorAspectsTuning::output>& def,
+                                  const C2P<C2StreamColorAspectsInfo::input>& coded) {
+        (void)mayBlock;
+        // take default values for all unspecified fields, and coded values for specified ones
+        me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+        me.set().primaries =
+                coded.v.primaries == PRIMARIES_UNSPECIFIED ? def.v.primaries : coded.v.primaries;
+        me.set().transfer =
+                coded.v.transfer == TRANSFER_UNSPECIFIED ? def.v.transfer : coded.v.transfer;
+        me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+        return C2R::Ok();
+    }
+
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input>& me,
+                                  const C2P<C2StreamPictureSizeInfo::output>& size) {
+        (void)mayBlock;
+        (void)size;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    std::shared_ptr<C2StreamColorAspectsTuning::output> getDefaultColorAspects_l() {
+        return mDefaultColorAspects;
+    }
+
+    std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() { return mColorAspects; }
+
+    static C2R Hdr10PlusInfoInputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::input>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    static C2R Hdr10PlusInfoOutputSetter(bool mayBlock, C2P<C2StreamHdr10PlusInfo::output>& me) {
+        (void)mayBlock;
+        (void)me;  // TODO: validate
+        return C2R::Ok();
+    }
+
+    // unsafe getters
+    std::shared_ptr<C2StreamPixelFormatInfo::output> getPixelFormat_l() const {
+        return mPixelFormat;
+    }
+
+    static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
+        (void)mayBlock;
+        if (me.v.mastering.red.x > 1) {
+            me.set().mastering.red.x = 1;
+        }
+        if (me.v.mastering.red.y > 1) {
+            me.set().mastering.red.y = 1;
+        }
+        if (me.v.mastering.green.x > 1) {
+            me.set().mastering.green.x = 1;
+        }
+        if (me.v.mastering.green.y > 1) {
+            me.set().mastering.green.y = 1;
+        }
+        if (me.v.mastering.blue.x > 1) {
+            me.set().mastering.blue.x = 1;
+        }
+        if (me.v.mastering.blue.y > 1) {
+            me.set().mastering.blue.y = 1;
+        }
+        if (me.v.mastering.white.x > 1) {
+            me.set().mastering.white.x = 1;
+        }
+        if (me.v.mastering.white.y > 1) {
+            me.set().mastering.white.y = 1;
+        }
+        if (me.v.mastering.maxLuminance > 65535.0) {
+            me.set().mastering.maxLuminance = 65535.0;
+        }
+        if (me.v.mastering.minLuminance > 6.5535) {
+            me.set().mastering.minLuminance = 6.5535;
+        }
+        if (me.v.maxCll > 65535.0) {
+            me.set().maxCll = 65535.0;
+        }
+        if (me.v.maxFall > 65535.0) {
+            me.set().maxFall = 65535.0;
+        }
+        return C2R::Ok();
+    }
+
+  private:
+    std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+    std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+    std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+    std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+    std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+    std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+    std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
+    std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
+    std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+};
+
+C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
+                               const std::shared_ptr<IntfImpl>& intfImpl)
+    : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+      mIntf(intfImpl) {
+    mTimeStart = mTimeEnd = systemTime();
+}
+
+C2SoftDav1dDec::~C2SoftDav1dDec() {
+    onRelease();
+}
+
+c2_status_t C2SoftDav1dDec::onInit() {
+    return initDecoder() ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftDav1dDec::onStop() {
+    // TODO: b/277797541 - investigate if the decoder needs to be flushed.
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    return C2_OK;
+}
+
+void C2SoftDav1dDec::onReset() {
+    (void)onStop();
+    c2_status_t err = onFlush_sm();
+    if (err != C2_OK) {
+        ALOGW("Failed to flush the av1 decoder. Trying to hard reset.");
+        destroyDecoder();
+        if (!initDecoder()) {
+            ALOGE("Hard reset failed.");
+        }
+    }
+}
+
+void C2SoftDav1dDec::flushDav1d() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+
+        while (mDecodedPictures.size() > 0) {
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        int res = 0;
+        while (true) {
+            memset(&p, 0, sizeof(p));
+
+            if ((res = dav1d_get_picture(mDav1dCtx, &p)) < 0) {
+                if (res != DAV1D_ERR(EAGAIN)) {
+                    ALOGE("Error decoding frame: %s\n", strerror(DAV1D_ERR(res)));
+                    break;
+                } else {
+                    res = 0;
+                    break;
+                }
+            } else {
+                dav1d_picture_unref(&p);
+            }
+        }
+
+        dav1d_flush(mDav1dCtx);
+    }
+}
+
+void C2SoftDav1dDec::onRelease() {
+    destroyDecoder();
+}
+
+c2_status_t C2SoftDav1dDec::onFlush_sm() {
+    flushDav1d();
+
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+
+    return C2_OK;
+}
+
+static int GetCPUCoreCount() {
+    int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+    cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+    // _SC_NPROC_ONLN must be defined...
+    cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+    CHECK(cpuCoreCount >= 1);
+    ALOGV("Number of CPU cores: %d", cpuCoreCount);
+    return cpuCoreCount;
+}
+
+bool C2SoftDav1dDec::initDecoder() {
+    nsecs_t now = systemTime();
+#ifdef FILE_DUMP_ENABLE
+    snprintf(mInDataFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_DATA_DUMP_EXT);
+    snprintf(mInSizeFileName, 256, "%s_%" PRId64 "d.%s", DUMP_FILE_PATH, now, INPUT_SIZE_DUMP_EXT);
+    snprintf(mDav1dOutYuvFileName, 256, "%s_%" PRId64 "dx.%s", DUMP_FILE_PATH, now,
+             OUTPUT_YUV_DUMP_EXT);
+
+    bool enableDumping = android::base::GetBoolProperty(ENABLE_DUMPING_FILES_PROPERTY,
+                                                        ENABLE_DUMPING_FILES_DEFAULT);
+
+    num_frames_to_dump =
+            android::base::GetIntProperty(NUM_FRAMES_TO_DUMP_PROPERTY, NUM_FRAMES_TO_DUMP_DEFAULT);
+
+    if (enableDumping) {
+        ALOGD("enableDumping = %d, num_frames_to_dump = %d", enableDumping, num_frames_to_dump);
+
+        mInDataFile = fopen(mInDataFileName, "wb");
+        if (mInDataFile == nullptr) {
+            ALOGD("Could not open file %s", mInDataFileName);
+        }
+
+        mInSizeFile = fopen(mInSizeFileName, "wb");
+        if (mInSizeFile == nullptr) {
+            ALOGD("Could not open file %s", mInSizeFileName);
+        }
+
+        mDav1dOutYuvFile = fopen(mDav1dOutYuvFileName, "wb");
+        if (mDav1dOutYuvFile == nullptr) {
+            ALOGD("Could not open file %s", mDav1dOutYuvFileName);
+        }
+    }
+#endif
+    mSignalledError = false;
+    mSignalledOutputEos = false;
+    mHalPixelFormat = HAL_PIXEL_FORMAT_YV12;
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        mPixelFormatInfo = mIntf->getPixelFormat_l();
+    }
+
+    const char* version = dav1d_version();
+
+    Dav1dSettings lib_settings;
+    dav1d_default_settings(&lib_settings);
+    int cpu_count = GetCPUCoreCount();
+    lib_settings.n_threads = std::max(cpu_count / 2, 1);  // use up to half the cores by default.
+
+    int32_t numThreads =
+            android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
+    if (numThreads > 0) lib_settings.n_threads = numThreads;
+
+    int res = 0;
+    if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
+        ALOGE("dav1d_open failed. status: %d.", res);
+        return false;
+    } else {
+        ALOGD("dav1d_open succeeded(n_threads=%d,version=%s).", lib_settings.n_threads, version);
+    }
+
+    return true;
+}
+
+void C2SoftDav1dDec::destroyDecoder() {
+    if (mDav1dCtx) {
+        Dav1dPicture p;
+        while (mDecodedPictures.size() > 0) {
+            memset(&p, 0, sizeof(p));
+            p = mDecodedPictures.front();
+            mDecodedPictures.pop_front();
+
+            dav1d_picture_unref(&p);
+        }
+
+        dav1d_close(&mDav1dCtx);
+        mDav1dCtx = nullptr;
+        mOutputBufferIndex = 0;
+        mInputBufferIndex = 0;
+    }
+#ifdef FILE_DUMP_ENABLE
+    if (mInDataFile != nullptr) {
+        fclose(mInDataFile);
+        mInDataFile = nullptr;
+    }
+
+    if (mInSizeFile != nullptr) {
+        fclose(mInSizeFile);
+        mInSizeFile = nullptr;
+    }
+
+    if (mDav1dOutYuvFile != nullptr) {
+        fclose(mDav1dOutYuvFile);
+        mDav1dOutYuvFile = nullptr;
+    }
+#endif
+}
+
+void fillEmptyWork(const std::unique_ptr<C2Work>& work) {
+    uint32_t flags = 0;
+    if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+        flags |= C2FrameData::FLAG_END_OF_STREAM;
+        ALOGV("signalling end_of_stream.");
+    }
+    work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+    work->worklets.front()->output.buffers.clear();
+    work->worklets.front()->output.ordinal = work->input.ordinal;
+    work->workletsProcessed = 1u;
+}
+
+void C2SoftDav1dDec::finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                                const std::shared_ptr<C2GraphicBlock>& block) {
+    std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(block, C2Rect(mWidth, mHeight));
+    {
+        IntfImpl::Lock lock = mIntf->lock();
+        buffer->setInfo(mIntf->getColorAspects_l());
+    }
+    auto fillWork = [buffer, index](const std::unique_ptr<C2Work>& work) {
+        uint32_t flags = 0;
+        if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
+            (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) {
+            flags |= C2FrameData::FLAG_END_OF_STREAM;
+            ALOGV("signalling end_of_stream.");
+        }
+        work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+        work->worklets.front()->output.buffers.clear();
+        work->worklets.front()->output.buffers.push_back(buffer);
+        work->worklets.front()->output.ordinal = work->input.ordinal;
+        work->workletsProcessed = 1u;
+    };
+    if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+        fillWork(work);
+    } else {
+        finish(index, fillWork);
+    }
+}
+
+void C2SoftDav1dDec::process(const std::unique_ptr<C2Work>& work,
+                             const std::shared_ptr<C2BlockPool>& pool) {
+    work->result = C2_OK;
+    work->workletsProcessed = 0u;
+    work->worklets.front()->output.configUpdate.clear();
+    work->worklets.front()->output.flags = work->input.flags;
+    if (mSignalledError || mSignalledOutputEos) {
+        work->result = C2_BAD_VALUE;
+        return;
+    }
+
+    size_t inOffset = 0u;
+    size_t inSize = 0u;
+    C2ReadView rView = mDummyReadView;
+    if (!work->input.buffers.empty()) {
+        rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+        inSize = rView.capacity();
+        if (inSize && rView.error()) {
+            ALOGE("read view map failed %d", rView.error());
+            work->result = C2_CORRUPTED;
+            return;
+        }
+    }
+
+    bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0);
+    bool end_of_stream = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+
+    if (codecConfig) {
+        fillEmptyWork(work);
+        return;
+    }
+
+    int64_t in_frameIndex = work->input.ordinal.frameIndex.peekll();
+    if (inSize) {
+        mInputBufferIndex = in_frameIndex;
+
+        uint8_t* bitstream = const_cast<uint8_t*>(rView.data() + inOffset);
+
+        mTimeStart = systemTime();
+        nsecs_t delay = mTimeStart - mTimeEnd;
+
+        // Send the bitstream data (inputBuffer) to dav1d.
+        if (mDav1dCtx) {
+            int i_ret = 0;
+
+            Dav1dSequenceHeader seq;
+            int res = dav1d_parse_sequence_header(&seq, bitstream, inSize);
+            if (res == 0) {
+                ALOGV("dav1d found a sequenceHeader (%dx%d) for in_frameIndex=%ld.", seq.max_width,
+                      seq.max_height, (long)in_frameIndex);
+            }
+
+            // insert OBU TD if it is not present.
+            // TODO: b/286852962
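+            // The first byte of an AV1 OBU is its header: a forbidden bit, a 4-bit obu_type and
+            // extension/size/reserved flags, so (byte >> 3) & 0xf extracts the OBU type.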
+            uint8_t obu_type = (bitstream[0] >> 3) & 0xf;
+            Dav1dData data;
+
+            uint8_t* ptr = (obu_type == DAV1D_OBU_TD) ? dav1d_data_create(&data, inSize)
+                                                      : dav1d_data_create(&data, inSize + 2);
+            if (ptr == nullptr) {
+                ALOGE("dav1d_data_create failed!");
+                i_ret = -1;
+
+            } else {
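+                // The C2 frame index travels through dav1d as the picture timestamp so that
+                // outputBuffer() can match each decoded picture back to its originating C2Work.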
+                data.m.timestamp = in_frameIndex;
+
+                int new_Size;
+                if (obu_type != DAV1D_OBU_TD) {
+                    new_Size = (int)(inSize + 2);
+
+                    // OBU TD
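+                    // 0x12 is a temporal-delimiter OBU header (obu_type=2, obu_has_size_field=1);
+                    // the 0 that follows is its LEB128-coded size, i.e. an empty payload.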
+                    ptr[0] = 0x12;
+                    ptr[1] = 0;
+
+                    memcpy(ptr + 2, bitstream, inSize);
+                } else {
+                    new_Size = (int)(inSize);
+                    // TODO: b/277797541 - investigate how to wrap this pointer in Dav1dData to
+                    // avoid memcopy operations.
+                    memcpy(ptr, bitstream, new_Size);
+                }
+
+                // ALOGV("memcpy(ptr,bitstream,inSize=%ld,new_Size=%d,in_frameIndex=%ld,timestamp=%ld,"
+                //       "ptr[0,1,2,3,4]=%x,%x,%x,%x,%x)",
+                //       inSize, new_Size, frameIndex, data.m.timestamp, ptr[0], ptr[1], ptr[2],
+                //       ptr[3], ptr[4]);
+
+                // Dump the bitstream data (inputBuffer) if dumping is enabled.
+#ifdef FILE_DUMP_ENABLE
+                if (mInDataFile) {
+                    int ret = fwrite(ptr, 1, new_Size, mInDataFile);
+
+                    if (ret != new_Size) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInDataFileName,
+                              new_Size, ret);
+                    }
+                }
+
+                // Dump the size per inputBuffer if dumping is enabled.
+                if (mInSizeFile) {
+                    int ret = fwrite(&new_Size, 1, 4, mInSizeFile);
+
+                    if (ret != 4) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mInSizeFileName, 4,
+                              ret);
+                    }
+                }
+#endif
+
+                bool b_draining = false;
+                int res;
+
+                do {
+                    res = dav1d_send_data(mDav1dCtx, &data);
+                    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
+                        ALOGE("Decoder feed error %s!", strerror(DAV1D_ERR(res)));
+                        /* bitstream decoding errors (typically DAV1D_ERR(EINVAL)) are assumed
+                         * to be recoverable. Other errors returned from this function are
+                         * either unexpected, or considered critical failures.
+                         */
+                        i_ret = res == DAV1D_ERR(EINVAL) ? 0 : -1;
+                        break;
+                    }
+
+                    bool b_output_error = false;
+
+                    do {
+                        Dav1dPicture img;
+                        memset(&img, 0, sizeof(img));
+
+                        res = dav1d_get_picture(mDav1dCtx, &img);
+                        if (res == 0) {
+                            mDecodedPictures.push_back(img);
+
+                            if (!end_of_stream) break;
+                        } else if (res == DAV1D_ERR(EAGAIN)) {
+                            /* the decoder needs more data to be able to output something.
+                             * if there is more data pending, continue the loop below or
+                             * otherwise break */
+                            if (data.sz != 0) res = 0;
+                            break;
+                        } else {
+                            ALOGE("warning! Decoder error %d!", res);
+                            b_output_error = true;
+                            break;
+                        }
+                    } while (res == 0);
+
+                    if (b_output_error) break;
+
+                    /* on drain, we must ignore the 1st EAGAIN */
+                    if (!b_draining && (res == DAV1D_ERR(EAGAIN) || res == 0) &&
+                        (end_of_stream)) {
+                        b_draining = true;
+                        res = 0;
+                    }
+                } while (res == 0 && ((data.sz != 0) || b_draining));
+
+                if (data.sz > 0) {
+                    ALOGE("unexpected data.sz=%zu after dav1d_send_data", data.sz);
+                    dav1d_data_unref(&data);
+                }
+            }
+
+            mTimeEnd = systemTime();
+            nsecs_t decodeTime = mTimeEnd - mTimeStart;
+            // ALOGV("decodeTime=%4" PRId64 " delay=%4" PRId64 "\n", decodeTime, delay);
+
+            if (i_ret != 0) {
+                ALOGE("av1 decoder failed to decode frame. status: %d.", i_ret);
+                work->result = C2_CORRUPTED;
+                work->workletsProcessed = 1u;
+                mSignalledError = true;
+                return;
+            }
+        }
+    }
+
+    (void)outputBuffer(pool, work);
+
+    if (end_of_stream) {
+        drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+        mSignalledOutputEos = true;
+    } else if (!inSize) {
+        fillEmptyWork(work);
+    }
+}
+
+void C2SoftDav1dDec::getHDRStaticParams(Dav1dPicture* picture,
+                                        const std::unique_ptr<C2Work>& work) {
+    C2StreamHdrStaticMetadataInfo::output hdrStaticMetadataInfo{};
+    bool infoPresent = false;
+
+    if (picture != nullptr) {
+        if (picture->mastering_display != nullptr) {
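+            // dav1d exposes the AV1 mdcv metadata in fixed point: chromaticities and the white
+            // point are 0.16, max_luminance is 24.8 and min_luminance is 18.14, hence the
+            // divisions by 65536, 256 and 16384 below.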
+            hdrStaticMetadataInfo.mastering.red.x =
+                    picture->mastering_display->primaries[0][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.red.y =
+                    picture->mastering_display->primaries[0][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.green.x =
+                    picture->mastering_display->primaries[1][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.green.y =
+                    picture->mastering_display->primaries[1][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.blue.x =
+                    picture->mastering_display->primaries[2][0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.blue.y =
+                    picture->mastering_display->primaries[2][1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.white.x =
+                    picture->mastering_display->white_point[0] / 65536.0;
+            hdrStaticMetadataInfo.mastering.white.y =
+                    picture->mastering_display->white_point[1] / 65536.0;
+
+            hdrStaticMetadataInfo.mastering.maxLuminance =
+                    picture->mastering_display->max_luminance / 256.0;
+            hdrStaticMetadataInfo.mastering.minLuminance =
+                    picture->mastering_display->min_luminance / 16384.0;
+
+            infoPresent = true;
+        }
+
+        if (picture->content_light != nullptr) {
+            hdrStaticMetadataInfo.maxCll = picture->content_light->max_content_light_level;
+            hdrStaticMetadataInfo.maxFall = picture->content_light->max_frame_average_light_level;
+            infoPresent = true;
+        }
+    }
+
+    // if (infoPresent) {
+    //   ALOGD("received a hdrStaticMetadataInfo (mastering.red=%f,%f mastering.green=%f,%f
+    //   mastering.blue=%f,%f mastering.white=%f,%f mastering.maxLuminance=%f
+    //   mastering.minLuminance=%f maxCll=%f maxFall=%f) at mOutputBufferIndex=%d.",
+    //   hdrStaticMetadataInfo.mastering.red.x,hdrStaticMetadataInfo.mastering.red.y,
+    //   hdrStaticMetadataInfo.mastering.green.x,hdrStaticMetadataInfo.mastering.green.y,
+    //   hdrStaticMetadataInfo.mastering.blue.x,hdrStaticMetadataInfo.mastering.blue.y,
+    //   hdrStaticMetadataInfo.mastering.white.x,hdrStaticMetadataInfo.mastering.white.y,
+    //   hdrStaticMetadataInfo.mastering.maxLuminance,hdrStaticMetadataInfo.mastering.minLuminance,
+    //   hdrStaticMetadataInfo.maxCll,
+    //   hdrStaticMetadataInfo.maxFall,
+    //   mOutputBufferIndex);
+    // }
+
+    // config if static info has changed
+    if (infoPresent && !(hdrStaticMetadataInfo == mHdrStaticMetadataInfo)) {
+        mHdrStaticMetadataInfo = hdrStaticMetadataInfo;
+        work->worklets.front()->output.configUpdate.push_back(
+                C2Param::Copy(mHdrStaticMetadataInfo));
+    }
+}
+
+void C2SoftDav1dDec::getHDR10PlusInfoData(Dav1dPicture* picture,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (picture != nullptr) {
+        if (picture->itut_t35 != nullptr) {
+            std::vector<uint8_t> payload;
+            size_t payloadSize = picture->itut_t35->payload_size;
+            if (payloadSize > 0) {
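+                // dav1d strips the ITU-T T.35 country code (and the extension byte when the
+                // code is 0xFF) from the payload, so prepend them again to rebuild the full
+                // T.35 message carried in C2StreamHdr10PlusInfo.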
+                payload.push_back(picture->itut_t35->country_code);
+                if (picture->itut_t35->country_code == 0xFF) {
+                    payload.push_back(picture->itut_t35->country_code_extension_byte);
+                }
+                payload.insert(payload.end(), picture->itut_t35->payload,
+                               picture->itut_t35->payload + picture->itut_t35->payload_size);
+            }
+
+            std::unique_ptr<C2StreamHdr10PlusInfo::output> hdr10PlusInfo =
+                    C2StreamHdr10PlusInfo::output::AllocUnique(payload.size());
+            if (!hdr10PlusInfo) {
+                ALOGE("Hdr10PlusInfo allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                return;
+            }
+            memcpy(hdr10PlusInfo->m.value, payload.data(), payload.size());
+
+            // ALOGD("Received a hdr10PlusInfo from picture->itut_t32
+            // (payload_size=%ld,country_code=%d) at mOutputBufferIndex=%d.",
+            // picture->itut_t35->payload_size,
+            // picture->itut_t35->country_code,
+            // mOutputBufferIndex);
+
+            // config if hdr10Plus info has changed
+            if (nullptr == mHdr10PlusInfo || !(*hdr10PlusInfo == *mHdr10PlusInfo)) {
+                mHdr10PlusInfo = std::move(hdr10PlusInfo);
+                work->worklets.front()->output.configUpdate.push_back(std::move(mHdr10PlusInfo));
+            }
+        }
+    }
+}
+
+void C2SoftDav1dDec::getVuiParams(Dav1dPicture* picture) {
+    VuiColorAspects vuiColorAspects;
+
+    if (picture) {
+        vuiColorAspects.primaries = picture->seq_hdr->pri;
+        vuiColorAspects.transfer = picture->seq_hdr->trc;
+        vuiColorAspects.coeffs = picture->seq_hdr->mtrx;
+        vuiColorAspects.fullRange = picture->seq_hdr->color_range;
+
+        // ALOGD("Received a vuiColorAspects from dav1d
+        //       (primaries = % d, transfer = % d, coeffs = % d, fullRange = % d)
+        //               at mOutputBufferIndex = % d,
+        //       out_frameIndex = % ld.",
+        //                          vuiColorAspects.primaries,
+        //       vuiColorAspects.transfer, vuiColorAspects.coeffs, vuiColorAspects.fullRange,
+        //       mOutputBufferIndex, picture->m.timestamp);
+    }
+
+    // convert vui aspects to C2 values if changed
+    if (!(vuiColorAspects == mBitstreamColorAspects)) {
+        mBitstreamColorAspects = vuiColorAspects;
+        ColorAspects sfAspects;
+        C2StreamColorAspectsInfo::input codedAspects = {0u};
+        ColorUtils::convertIsoColorAspectsToCodecAspects(
+                vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+                vuiColorAspects.fullRange, sfAspects);
+        if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+            codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+            codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+            codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+        }
+        if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+            codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+        }
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+    }
+}
+
+void C2SoftDav1dDec::setError(const std::unique_ptr<C2Work>& work, c2_status_t error) {
+    mSignalledError = true;
+    work->result = error;
+    work->workletsProcessed = 1u;
+}
+
+bool C2SoftDav1dDec::allocTmpFrameBuffer(size_t size) {
+    if (size > mTmpFrameBufferSize) {
+        mTmpFrameBuffer = std::make_unique<uint16_t[]>(size);
+        if (mTmpFrameBuffer == nullptr) {
+            mTmpFrameBufferSize = 0;
+            return false;
+        }
+        mTmpFrameBufferSize = size;
+    }
+    return true;
+}
+
+#ifdef FILE_DUMP_ENABLE
+void C2SoftDav1dDec::writeDav1dOutYuvFile(const Dav1dPicture& p) {
+    if (mDav1dOutYuvFile != NULL) {
+        uint8_t* ptr;
+        const int hbd = p.p.bpc > 8;
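+        // High-bit-depth (>8 bpc) planes hold 16-bit samples, so a row spans width << 1 bytes;
+        // hbd is used as that shift below.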
+
+        ptr = (uint8_t*)p.data[0];
+        for (int y = 0; y < p.p.h; y++) {
+            int iSize = p.p.w << hbd;
+            int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+            if (ret != iSize) {
+                ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName, iSize,
+                      ret);
+                break;
+            }
+
+            ptr += p.stride[0];
+        }
+
+        if (p.p.layout != DAV1D_PIXEL_LAYOUT_I400) {
+            // u/v
+            const int ss_ver = p.p.layout == DAV1D_PIXEL_LAYOUT_I420;
+            const int ss_hor = p.p.layout != DAV1D_PIXEL_LAYOUT_I444;
+            const int cw = (p.p.w + ss_hor) >> ss_hor;
+            const int ch = (p.p.h + ss_ver) >> ss_ver;
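+            // I420 subsamples chroma both vertically and horizontally, I422 only horizontally;
+            // cw/ch are the resulting (rounded-up) chroma plane dimensions.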
+            for (int pl = 1; pl <= 2; pl++) {
+                ptr = (uint8_t*)p.data[pl];
+                for (int y = 0; y < ch; y++) {
+                    int iSize = cw << hbd;
+                    int ret = fwrite(ptr, 1, iSize, mDav1dOutYuvFile);
+                    if (ret != iSize) {
+                        ALOGE("Error in fwrite %s, requested %d, returned %d", mDav1dOutYuvFileName,
+                              iSize, ret);
+                        break;
+                    }
+                    ptr += p.stride[1];
+                }
+            }
+        }
+    }
+}
+#endif
+
+bool C2SoftDav1dDec::outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                                  const std::unique_ptr<C2Work>& work) {
+    if (!(work && pool)) return false;
+    if (mDav1dCtx == nullptr) return false;
+
+    // Get a decoded picture from dav1d if it is enabled.
+    Dav1dPicture img;
+    memset(&img, 0, sizeof(img));
+
+    int res = 0;
+    if (mDecodedPictures.size() > 0) {
+        img = mDecodedPictures.front();
+        mDecodedPictures.pop_front();
+        // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from the deque for
+        // outputBuffer.",img.m.timestamp,img.m.timestamp);
+    } else {
+        res = dav1d_get_picture(mDav1dCtx, &img);
+        if (res == 0) {
+            // ALOGD("Got a picture(out_frameIndex=%ld,timestamp=%ld) from dav1d for
+            // outputBuffer.",img.m.timestamp,img.m.timestamp);
+        } else {
+            ALOGE("failed to get a picture from dav1d for outputBuffer.");
+        }
+    }
+
+    if (res == DAV1D_ERR(EAGAIN)) {
+        ALOGD("Not enough data to output a picture.");
+        return false;
+    }
+    if (res != 0) {
+        ALOGE("The AV1 decoder failed to get a picture (res=%s).", strerror(DAV1D_ERR(res)));
+        return false;
+    }
+
+    const int width = img.p.w;
+    const int height = img.p.h;
+    if (width != mWidth || height != mHeight) {
+        mWidth = width;
+        mHeight = height;
+
+        C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size));
+        } else {
+            ALOGE("Config update size failed");
+            mSignalledError = true;
+            work->result = C2_CORRUPTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+    }
+
+    getVuiParams(&img);
+    getHDRStaticParams(&img, work);
+    getHDR10PlusInfoData(&img, work);
+
+    // out_frameIndex that the decoded picture returns from dav1d.
+    int64_t out_frameIndex = img.m.timestamp;
+
+#if LIBYUV_VERSION < 1779
+    if (img.p.layout != DAV1D_PIXEL_LAYOUT_I400 && img.p.layout != DAV1D_PIXEL_LAYOUT_I420) {
+        ALOGE("image_format %d not supported", img.p.layout);
+        mSignalledError = true;
+        work->workletsProcessed = 1u;
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+#endif
+
+    const bool isMonochrome = img.p.layout == DAV1D_PIXEL_LAYOUT_I400;
+
+    int bitdepth = img.p.bpc;
+
+    std::shared_ptr<C2GraphicBlock> block;
+    uint32_t format = HAL_PIXEL_FORMAT_YV12;
+    std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
+    if (bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+        IntfImpl::Lock lock = mIntf->lock();
+        codedColorAspects = mIntf->getColorAspects_l();
+        bool allowRGBA1010102 = false;
+        if (codedColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
+            codedColorAspects->matrix == C2Color::MATRIX_BT2020 &&
+            codedColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+            allowRGBA1010102 = true;
+        }
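+        // RGBA_1010102 is only allowed when the stream signals HDR10 (BT.2020 primaries/matrix
+        // with the ST 2084 transfer); getHalPixelFormatForBitDepth10() picks the actual format.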
+        format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
+#if !HAVE_LIBYUV_I410_I210_TO_AB30
+        if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
+            (img.p.layout != DAV1D_PIXEL_LAYOUT_I420)) {
+            ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+            mSignalledError = true;
+            work->result = C2_OMITTED;
+            work->workletsProcessed = 1u;
+            return false;
+        }
+#endif
+    }
+
+    if (mHalPixelFormat != format) {
+        C2StreamPixelFormatInfo::output pixelFormat(0u, format);
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = mIntf->config({&pixelFormat}, C2_MAY_BLOCK, &failures);
+        if (err == C2_OK) {
+            work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(pixelFormat));
+        } else {
+            ALOGE("Config update pixelFormat failed");
+            mSignalledError = true;
+            work->workletsProcessed = 1u;
+            work->result = C2_CORRUPTED;
+            return false;
+        }
+        mHalPixelFormat = format;
+    }
+
+    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+
+    // We always create a graphic block that is width aligned to 16 and height
+    // aligned to 2. We set the correct "crop" value of the image in the call to
+    // createGraphicBuffer() by setting the correct image dimensions.
+    c2_status_t err =
+            pool->fetchGraphicBlock(align(mWidth, 16), align(mHeight, 2), format, usage, &block);
+
+    if (err != C2_OK) {
+        ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+        work->result = err;
+        return false;
+    }
+
+    C2GraphicView wView = block->map().get();
+
+    if (wView.error()) {
+        ALOGE("graphic view map failed %d", wView.error());
+        work->result = C2_CORRUPTED;
+        return false;
+    }
+
+    // ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", block->width(),
+    //       block->height(), mWidth, mHeight, (int)out_frameIndex);
+
+    mOutputBufferIndex = out_frameIndex;
+
+    uint8_t* dstY = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_Y]);
+    uint8_t* dstU = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_U]);
+    uint8_t* dstV = const_cast<uint8_t*>(wView.data()[C2PlanarLayout::PLANE_V]);
+
+    C2PlanarLayout layout = wView.layout();
+    size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+    if (bitdepth == 10) {
+        // TODO: b/277797541 - Investigate if we can ask DAV1D to output the required format during
+        // decompression to avoid color conversion.
+        const uint16_t* srcY = (const uint16_t*)img.data[0];
+        const uint16_t* srcU = (const uint16_t*)img.data[1];
+        const uint16_t* srcV = (const uint16_t*)img.data[2];
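+        // dav1d strides are in bytes; divide by 2 to get strides in 16-bit samples.
+        // img.stride[1] applies to both chroma planes.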
+        size_t srcYStride = img.stride[0] / 2;
+        size_t srcUStride = img.stride[1] / 2;
+        size_t srcVStride = img.stride[1] / 2;
+
+        if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+            bool processed = false;
+#if HAVE_LIBYUV_I410_I210_TO_AB30
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                libyuv::I410ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                         dstYStride, &libyuv::kYuvV2020Constants, mWidth, mHeight);
+                processed = true;
+            }
+#endif  // HAVE_LIBYUV_I410_I210_TO_AB30
+            if (!processed) {
+                if (isMonochrome) {
+                    const size_t tmpSize = mWidth;
+                    const bool needFill = tmpSize > mTmpFrameBufferSize;
+                    if (!allocTmpFrameBuffer(tmpSize)) {
+                        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                        setError(work, C2_NO_MEMORY);
+                        return false;
+                    }
+                    srcU = srcV = mTmpFrameBuffer.get();
+                    srcUStride = srcVStride = 0;
+                    if (needFill) {
+                        std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
+                    }
+                }
+                convertYUV420Planar16ToY410OrRGBA1010102(
+                        (uint32_t*)dstY, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+                        dstYStride / sizeof(uint32_t), mWidth, mHeight,
+                        std::static_pointer_cast<const C2ColorAspectsStruct>(codedColorAspects));
+            }
+        } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+            dstYStride /= 2;
+            dstUStride /= 2;
+            dstVStride /= 2;
+#if LIBYUV_VERSION >= 1779
+            if ((img.p.layout == DAV1D_PIXEL_LAYOUT_I444) ||
+                (img.p.layout == DAV1D_PIXEL_LAYOUT_I422)) {
+                // TODO(https://crbug.com/libyuv/952): replace this block with libyuv::I410ToP010
+                // and libyuv::I210ToP010 when they are available. Note it may be safe to alias dstY
+                // in I010ToP010, but the libyuv API doesn't make any guarantees.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
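+                // The temporary allocation is carved into I010 planes: a full-resolution luma
+                // plane followed by two half-height chroma planes.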
+                if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                    libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                } else {
+                    libyuv::I210ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                       dstYStride, tmpU, dstUStride, tmpV, dstUStride, mWidth,
+                                       mHeight);
+                }
+                libyuv::I010ToP010(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstVStride,
+                                   (uint16_t*)dstY, dstYStride, (uint16_t*)dstU, dstUStride, mWidth,
+                                   mHeight);
+            } else {
+                convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                            srcYStride, srcUStride, srcVStride, dstYStride,
+                                            dstUStride, mWidth, mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToP010((uint16_t*)dstY, (uint16_t*)dstU, srcY, srcU, srcV,
+                                        srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
+                                        mWidth, mHeight, isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        } else {
+#if LIBYUV_VERSION >= 1779
+            if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+                // TODO(https://crbug.com/libyuv/950): replace this block with libyuv::I410ToI420
+                // when it's available.
+                const size_t tmpSize = dstYStride * mHeight + dstUStride * align(mHeight, 2);
+                if (!allocTmpFrameBuffer(tmpSize)) {
+                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                    setError(work, C2_NO_MEMORY);
+                    return false;
+                }
+                uint16_t* const tmpY = mTmpFrameBuffer.get();
+                uint16_t* const tmpU = tmpY + dstYStride * mHeight;
+                uint16_t* const tmpV = tmpU + dstUStride * align(mHeight, 2) / 2;
+                libyuv::I410ToI010(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, tmpY,
+                                   dstYStride, tmpU, dstUStride, tmpV, dstVStride, mWidth, mHeight);
+                libyuv::I010ToI420(tmpY, dstYStride, tmpU, dstUStride, tmpV, dstUStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+                libyuv::I210ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                                   dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+            } else {
+                convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride,
+                                            srcUStride, srcVStride, dstYStride, dstUStride, mWidth,
+                                            mHeight, isMonochrome);
+            }
+#else   // LIBYUV_VERSION < 1779
+            convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                        srcVStride, dstYStride, dstUStride, mWidth, mHeight,
+                                        isMonochrome);
+#endif  // LIBYUV_VERSION >= 1779
+        }
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 10bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride * 2, 1, mWidth * 2, fp_out);
+                if (ret != mWidth * 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth * 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride * 2, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+        }
+#endif
+    } else {
+        const uint8_t* srcY = (const uint8_t*)img.data[0];
+        const uint8_t* srcU = (const uint8_t*)img.data[1];
+        const uint8_t* srcV = (const uint8_t*)img.data[2];
+
+        size_t srcYStride = img.stride[0];
+        size_t srcUStride = img.stride[1];
+        size_t srcVStride = img.stride[1];
+
+        // Dump the output buffer if dumping is enabled (debug only).
+#ifdef FILE_DUMP_ENABLE
+        FILE* fp_out = mDav1dOutYuvFile;
+        // if(mOutputBufferIndex % 100 == 0)
+        ALOGV("output a 8bit picture %dx%d from dav1d "
+              "(mInputBufferIndex=%d,mOutputBufferIndex=%d,format=%d).",
+              mWidth, mHeight, mInputBufferIndex, mOutputBufferIndex, format);
+
+        if (fp_out && mOutputBufferIndex <= num_frames_to_dump) {
+            for (int i = 0; i < mHeight; i++) {
+                int ret = fwrite((uint8_t*)srcY + i * srcYStride, 1, mWidth, fp_out);
+                if (ret != mWidth) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcU + i * srcUStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+
+            for (int i = 0; i < mHeight / 2; i++) {
+                int ret = fwrite((uint8_t*)srcV + i * srcVStride, 1, mWidth / 2, fp_out);
+                if (ret != mWidth / 2) {
+                    ALOGE("Error in fwrite, requested %d, returned %d", mWidth / 2, ret);
+                    break;
+                }
+            }
+        }
+#endif
+        if (img.p.layout == DAV1D_PIXEL_LAYOUT_I444) {
+            libyuv::I444ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else if (img.p.layout == DAV1D_PIXEL_LAYOUT_I422) {
+            libyuv::I422ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride, dstY,
+                               dstYStride, dstU, dstUStride, dstV, dstVStride, mWidth, mHeight);
+        } else {
+            convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
+                                       srcVStride, dstYStride, dstUStride, dstVStride, mWidth,
+                                       mHeight, isMonochrome);
+        }
+    }
+
+    dav1d_picture_unref(&img);
+
+    finishWork(out_frameIndex, work, std::move(block));
+    block = nullptr;
+    return true;
+}
+
+c2_status_t C2SoftDav1dDec::drainInternal(uint32_t drainMode,
+                                          const std::shared_ptr<C2BlockPool>& pool,
+                                          const std::unique_ptr<C2Work>& work) {
+    if (drainMode == NO_DRAIN) {
+        ALOGW("drain with NO_DRAIN: no-op");
+        return C2_OK;
+    }
+    if (drainMode == DRAIN_CHAIN) {
+        ALOGW("DRAIN_CHAIN not supported");
+        return C2_OMITTED;
+    }
+
+    while (outputBuffer(pool, work)) {
+    }
+
+    if (drainMode == DRAIN_COMPONENT_WITH_EOS && work && work->workletsProcessed == 0u) {
+        fillEmptyWork(work);
+    }
+
+    return C2_OK;
+}
+
+c2_status_t C2SoftDav1dDec::drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) {
+    return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftDav1dFactory : public C2ComponentFactory {
+  public:
+    C2SoftDav1dFactory()
+        : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+                  GetCodec2PlatformComponentStore()->getParamReflector())) {}
+
+    virtual c2_status_t createComponent(c2_node_id_t id,
+                                        std::shared_ptr<C2Component>* const component,
+                                        std::function<void(C2Component*)> deleter) override {
+        *component = std::shared_ptr<C2Component>(
+                new C2SoftDav1dDec(COMPONENT_NAME, id,
+                                   std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual c2_status_t createInterface(
+            c2_node_id_t id, std::shared_ptr<C2ComponentInterface>* const interface,
+            std::function<void(C2ComponentInterface*)> deleter) override {
+        *interface = std::shared_ptr<C2ComponentInterface>(
+                new SimpleInterface<C2SoftDav1dDec::IntfImpl>(
+                        COMPONENT_NAME, id, std::make_shared<C2SoftDav1dDec::IntfImpl>(mHelper)),
+                deleter);
+        return C2_OK;
+    }
+
+    virtual ~C2SoftDav1dFactory() override = default;
+
+  private:
+    std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+}  // namespace android
+
+__attribute__((cfi_canonical_jump_table)) extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+    ALOGV("in %s", __func__);
+    return new ::android::C2SoftDav1dFactory();
+}
+
+__attribute__((cfi_canonical_jump_table)) extern "C" void DestroyCodec2Factory(
+        ::C2ComponentFactory* factory) {
+    ALOGV("in %s", __func__);
+    delete factory;
+}
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
new file mode 100644
index 0000000..5201456
--- /dev/null
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_DAV1D_DEC_H_
+#define ANDROID_C2_SOFT_DAV1D_DEC_H_
+
+#include <inttypes.h>
+
+#include <memory>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <C2Config.h>
+#include <SimpleC2Component.h>
+
+#include <dav1d/dav1d.h>
+#include <deque>
+
+//#define FILE_DUMP_ENABLE 1
+#define DUMP_FILE_PATH "/data/local/tmp/dump"
+#define INPUT_DATA_DUMP_EXT "av1"
+#define INPUT_SIZE_DUMP_EXT "size"
+#define OUTPUT_YUV_DUMP_EXT "yuv"
+
+namespace android {
+
+struct C2SoftDav1dDec : public SimpleC2Component {
+    class IntfImpl;
+
+    C2SoftDav1dDec(const char* name, c2_node_id_t id, const std::shared_ptr<IntfImpl>& intfImpl);
+    ~C2SoftDav1dDec();
+
+    // Begin SimpleC2Component overrides.
+    c2_status_t onInit() override;
+    c2_status_t onStop() override;
+    void onReset() override;
+    void onRelease() override;
+    c2_status_t onFlush_sm() override;
+    void process(const std::unique_ptr<C2Work>& work,
+                 const std::shared_ptr<C2BlockPool>& pool) override;
+    c2_status_t drain(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool) override;
+    // End SimpleC2Component overrides.
+
+  private:
+    std::shared_ptr<IntfImpl> mIntf;
+
+    int mInputBufferIndex = 0;
+    int mOutputBufferIndex = 0;
+
+    Dav1dContext* mDav1dCtx = nullptr;
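+    // Pictures already returned by dav1d_get_picture() while feeding input, waiting to be
+    // consumed by outputBuffer(); each entry holds a dav1d reference that must be released
+    // with dav1d_picture_unref().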
+    std::deque<Dav1dPicture> mDecodedPictures;
+
+    // configurations used by component in process
+    // (TODO: keep this in intf but make them internal only)
+    std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+
+    uint32_t mHalPixelFormat;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    bool mSignalledOutputEos;
+    bool mSignalledError;
+    // Used during 10-bit I444/I422 to 10-bit P010 & 8-bit I420 conversions.
+    std::unique_ptr<uint16_t[]> mTmpFrameBuffer;
+    size_t mTmpFrameBufferSize = 0;
+
+    C2StreamHdrStaticMetadataInfo::output mHdrStaticMetadataInfo;
+    std::unique_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfo = nullptr;
+
+    // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+    // converting them to C2 values for each frame
+    struct VuiColorAspects {
+        uint8_t primaries;
+        uint8_t transfer;
+        uint8_t coeffs;
+        uint8_t fullRange;
+
+        // default color aspects
+        VuiColorAspects()
+            : primaries(C2Color::PRIMARIES_UNSPECIFIED),
+              transfer(C2Color::TRANSFER_UNSPECIFIED),
+              coeffs(C2Color::MATRIX_UNSPECIFIED),
+              fullRange(C2Color::RANGE_UNSPECIFIED) {}
+
+        bool operator==(const VuiColorAspects& o) {
+            return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs &&
+                   fullRange == o.fullRange;
+        }
+    } mBitstreamColorAspects;
+
+    nsecs_t mTimeStart = 0;  // Time at the start of decode()
+    nsecs_t mTimeEnd = 0;    // Time at the end of decode()
+
+    bool initDecoder();
+    void getHDRStaticParams(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getHDR10PlusInfoData(Dav1dPicture* picture, const std::unique_ptr<C2Work>& work);
+    void getVuiParams(Dav1dPicture* picture);
+    void destroyDecoder();
+    void finishWork(uint64_t index, const std::unique_ptr<C2Work>& work,
+                    const std::shared_ptr<C2GraphicBlock>& block);
+    // Sets |work->result| and mSignalledError.
+    void setError(const std::unique_ptr<C2Work>& work, c2_status_t error);
+    bool allocTmpFrameBuffer(size_t size);
+    bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
+                      const std::unique_ptr<C2Work>& work);
+
+    c2_status_t drainInternal(uint32_t drainMode, const std::shared_ptr<C2BlockPool>& pool,
+                              const std::unique_ptr<C2Work>& work);
+
+    void flushDav1d();
+
+#ifdef FILE_DUMP_ENABLE
+    char mInDataFileName[256];
+    char mInSizeFileName[256];
+    char mDav1dOutYuvFileName[256];
+
+    FILE* mInDataFile = nullptr;
+    FILE* mInSizeFile = nullptr;
+    FILE* mDav1dOutYuvFile = nullptr;
+
+    void writeDav1dOutYuvFile(const Dav1dPicture& p);
+
+    int num_frames_to_dump = 0;
+#endif
+
+    C2_DO_NOT_COPY(C2SoftDav1dDec);
+};
+
+}  // namespace android
+
+#endif  // ANDROID_C2_SOFT_DAV1D_DEC_H_
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 3e4247b..5f5f05d 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -752,6 +752,19 @@
     return true;
 }
 
+bool C2SoftGav1Dec::fillMonochromeRow(int value) {
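+    // |value| is the mid-level chroma sample for the current bit depth (512 for 10-bit,
+    // 2048 for 12-bit); the row is reused with a zero stride to give monochrome frames
+    // neutral chroma.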
+    const size_t tmpSize = mWidth;
+    const bool needFill = tmpSize > mTmpFrameBufferSize;
+    if (!allocTmpFrameBuffer(tmpSize)) {
+        ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+        return false;
+    }
+    if (needFill) {
+        std::fill_n(mTmpFrameBuffer.get(), tmpSize, value);
+    }
+    return true;
+}
+
 bool C2SoftGav1Dec::outputBuffer(const std::shared_ptr<C2BlockPool> &pool,
                                  const std::unique_ptr<C2Work> &work) {
   if (!(work && pool)) return false;
@@ -773,6 +786,16 @@
     return false;
   }
 
+#if LIBYUV_VERSION < 1871
+  if (buffer->bitdepth > 10) {
+    ALOGE("bitdepth %d is not supported", buffer->bitdepth);
+    mSignalledError = true;
+    work->workletsProcessed = 1u;
+    work->result = C2_CORRUPTED;
+    return false;
+  }
+#endif
+
   const int width = buffer->displayed_width[0];
   const int height = buffer->displayed_height[0];
   if (width != mWidth || height != mHeight) {
@@ -816,7 +839,7 @@
   std::shared_ptr<C2GraphicBlock> block;
   uint32_t format = HAL_PIXEL_FORMAT_YV12;
   std::shared_ptr<C2StreamColorAspectsInfo::output> codedColorAspects;
-  if (buffer->bitdepth == 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+  if (buffer->bitdepth >= 10 && mPixelFormatInfo->value != HAL_PIXEL_FORMAT_YCBCR_420_888) {
     IntfImpl::Lock lock = mIntf->lock();
     codedColorAspects = mIntf->getColorAspects_l();
     bool allowRGBA1010102 = false;
@@ -828,8 +851,9 @@
     format = getHalPixelFormatForBitDepth10(allowRGBA1010102);
 #if !HAVE_LIBYUV_I410_I210_TO_AB30
     if ((format == HAL_PIXEL_FORMAT_RGBA_1010102) &&
-        (buffer->image_format != libgav1::kImageFormatYuv420)) {
-        ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+        (buffer->image_format != libgav1::kImageFormatYuv420) &&
+        (buffer->bitdepth == 10)) {
+        ALOGE("Only YUV420 output is supported for 10-bit when targeting RGBA_1010102");
       mSignalledError = true;
       work->result = C2_OMITTED;
       work->workletsProcessed = 1u;
@@ -837,6 +861,18 @@
     }
 #endif
   }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_RGBA_1010102 &&
+      (buffer->image_format == libgav1::kImageFormatYuv422 ||
+       buffer->image_format == libgav1::kImageFormatYuv444)) {
+      // There are no 12-bit color conversion functions from YUV422/YUV444 to
+      // RGBA_1010102. Use 8-bit YV12 in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
+  if (buffer->bitdepth == 12 && format == HAL_PIXEL_FORMAT_YCBCR_P010) {
+      // There are no 12-bit color conversion functions to P010. Use 8-bit YV12
+      // in this case.
+      format = HAL_PIXEL_FORMAT_YV12;
+  }
 
   if (mHalPixelFormat != format) {
     C2StreamPixelFormatInfo::output pixelFormat(0u, format);
@@ -890,7 +926,41 @@
   size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
   size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
-  if (buffer->bitdepth == 10) {
+  if (buffer->bitdepth == 12) {
+#if LIBYUV_VERSION >= 1871
+      const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
+      const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
+      const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
+      size_t srcYStride = buffer->stride[0] / 2;
+      size_t srcUStride = buffer->stride[1] / 2;
+      size_t srcVStride = buffer->stride[2] / 2;
+      if (isMonochrome) {
+          if (!fillMonochromeRow(2048)) {
+              setError(work, C2_NO_MEMORY);
+              return false;
+          }
+          srcU = srcV = mTmpFrameBuffer.get();
+          srcUStride = srcVStride = 0;
+      }
+      if (format == HAL_PIXEL_FORMAT_RGBA_1010102) {
+          libyuv::I012ToAB30Matrix(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                                   dstY, dstYStride, &libyuv::kYuvV2020Constants,
+                                   mWidth, mHeight);
+      } else if (isMonochrome || buffer->image_format == libgav1::kImageFormatYuv420) {
+          libyuv::I012ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else if (buffer->image_format == libgav1::kImageFormatYuv444) {
+          libyuv::I412ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      } else {
+          libyuv::I212ToI420(srcY, srcYStride, srcU, srcUStride, srcV, srcVStride,
+                             dstY, dstYStride, dstU, dstUStride, dstV, dstVStride,
+                             mWidth, mHeight);
+      }
+#endif  // LIBYUV_VERSION >= 1871
+  } else if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
     const uint16_t *srcU = (const uint16_t *)buffer->plane[1];
     const uint16_t *srcV = (const uint16_t *)buffer->plane[2];
@@ -915,18 +985,12 @@
 #endif  // HAVE_LIBYUV_I410_I210_TO_AB30
         if (!processed) {
             if (isMonochrome) {
-                const size_t tmpSize = mWidth;
-                const bool needFill = tmpSize > mTmpFrameBufferSize;
-                if (!allocTmpFrameBuffer(tmpSize)) {
-                    ALOGE("Error allocating temp conversion buffer (%zu bytes)", tmpSize);
+                if (!fillMonochromeRow(512)) {
                     setError(work, C2_NO_MEMORY);
                     return false;
                 }
                 srcU = srcV = mTmpFrameBuffer.get();
                 srcUStride = srcVStride = 0;
-                if (needFill) {
-                    std::fill_n(mTmpFrameBuffer.get(), tmpSize, 512);
-                }
             }
             convertYUV420Planar16ToY410OrRGBA1010102(
                     (uint32_t *)dstY, srcY, srcU, srcV, srcYStride,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.h b/media/codec2/components/gav1/C2SoftGav1Dec.h
index c3b27ea..0e09fcc 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.h
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.h
@@ -105,6 +105,7 @@
   // Sets |work->result| and mSignalledError. Returns false.
   void setError(const std::unique_ptr<C2Work> &work, c2_status_t error);
   bool allocTmpFrameBuffer(size_t size);
+  bool fillMonochromeRow(int value);
   bool outputBuffer(const std::shared_ptr<C2BlockPool>& pool,
                     const std::unique_ptr<C2Work>& work);
   c2_status_t drainInternal(uint32_t drainMode,
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index 38c5aa5..7a0525b 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -38,6 +38,7 @@
         "android.hardware.media.c2@1.0",
         "android.hardware.media.c2@1.1",
         "android.hardware.media.c2@1.2",
+        "android.hardware.media.c2-V1-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 97c0806..9a4b385 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -16,14 +16,16 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2Client"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
-
+#include <utils/Trace.h>
 #include <codec2/hidl/client.h>
 #include <C2Debug.h>
 #include <C2BufferPriv.h>
 #include <C2Config.h> // for C2StreamUsageTuning
 #include <C2PlatformSupport.h>
 
+#include <android/binder_auto_utils.h>
 #include <android/hardware/media/bufferpool/2.0/IClientManager.h>
 #include <android/hardware/media/c2/1.0/IComponent.h>
 #include <android/hardware/media/c2/1.0/IComponentInterface.h>
@@ -32,6 +34,15 @@
 #include <android/hardware/media/c2/1.0/IConfigurable.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 
+#include <aidl/android/hardware/media/c2/FieldSupportedValues.h>
+#include <aidl/android/hardware/media/c2/FieldSupportedValuesQuery.h>
+#include <aidl/android/hardware/media/c2/FieldSupportedValuesQueryResult.h>
+#include <aidl/android/hardware/media/c2/IComponent.h>
+#include <aidl/android/hardware/media/c2/IComponentInterface.h>
+#include <aidl/android/hardware/media/c2/IComponentStore.h>
+#include <aidl/android/hardware/media/c2/IConfigurable.h>
+#include <aidl/android/hardware/media/c2/ParamDescriptor.h>
+
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
 #include <codec2/hidl/1.0/types.h>
@@ -47,7 +58,6 @@
 #include <system/window.h> // for NATIVE_WINDOW_QUERY_*
 #include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
 
-
 #include <deque>
 #include <iterator>
 #include <limits>
@@ -65,11 +75,6 @@
 using ::android::hardware::Return;
 using ::android::hardware::Void;
 
-using namespace ::android::hardware::media::c2::V1_1;
-using namespace ::android::hardware::media::c2::V1_1::utils;
-using namespace ::android::hardware::media::bufferpool::V2_0;
-using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
-
 using HGraphicBufferProducer1 = ::android::hardware::graphics::bufferqueue::
         V1_0::IGraphicBufferProducer;
 using HGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
@@ -80,6 +85,13 @@
         V2_0::utils::H2BGraphicBufferProducer;
 using ::android::hardware::media::c2::V1_2::SurfaceSyncObj;
 
+namespace bufferpool_hidl = ::android::hardware::media::bufferpool::V2_0;
+namespace c2_aidl = ::aidl::android::hardware::media::c2;
+namespace c2_hidl_base = ::android::hardware::media::c2;
+namespace c2_hidl = ::android::hardware::media::c2::V1_2;
+
+using c2_hidl::utils::operator<<;
+
 namespace /* unnamed */ {
 
 // c2_status_t value that corresponds to hwbinder transaction failure.
@@ -254,15 +266,43 @@
         return sCaches;
     }
 };
+// Codec2ConfigurableClient::HidlImpl
 
-// Codec2ConfigurableClient
+struct Codec2ConfigurableClient::HidlImpl : public Codec2ConfigurableClient::ImplBase {
+    typedef c2_hidl::IConfigurable Base;
 
-const C2String& Codec2ConfigurableClient::getName() const {
-    return mName;
-}
+    // base cannot be null.
+    explicit HidlImpl(const sp<Base>& base);
 
-Codec2ConfigurableClient::Codec2ConfigurableClient(
-        const sp<IConfigurable>& base)
+    const C2String& getName() const override {
+        return mName;
+    }
+
+    c2_status_t query(
+            const std::vector<C2Param*>& stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+    c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+    c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override;
+
+    c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override;
+
+private:
+    sp<Base> mBase;
+    const C2String mName;
+};
+
+Codec2ConfigurableClient::HidlImpl::HidlImpl(const sp<Base>& base)
       : mBase{base},
         mName{[base]() -> C2String {
                 C2String outName;
@@ -274,12 +314,12 @@
             }()} {
 }
 
-c2_status_t Codec2ConfigurableClient::query(
+c2_status_t Codec2ConfigurableClient::HidlImpl::query(
         const std::vector<C2Param*> &stackParams,
         const std::vector<C2Param::Index> &heapParamIndices,
         c2_blocking_t mayBlock,
         std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
-    hidl_vec<ParamIndex> indices(
+    hidl_vec<c2_hidl::ParamIndex> indices(
             stackParams.size() + heapParamIndices.size());
     size_t numIndices = 0;
     for (C2Param* const& stackParam : stackParams) {
@@ -287,12 +327,12 @@
             LOG(WARNING) << "query -- null stack param encountered.";
             continue;
         }
-        indices[numIndices++] = static_cast<ParamIndex>(stackParam->index());
+        indices[numIndices++] = static_cast<c2_hidl::ParamIndex>(stackParam->index());
     }
     size_t numStackIndices = numIndices;
     for (const C2Param::Index& index : heapParamIndices) {
         indices[numIndices++] =
-                static_cast<ParamIndex>(static_cast<uint32_t>(index));
+                static_cast<c2_hidl::ParamIndex>(static_cast<uint32_t>(index));
     }
     indices.resize(numIndices);
     if (heapParams) {
@@ -303,7 +343,7 @@
             indices,
             mayBlock == C2_MAY_BLOCK,
             [&status, &numStackIndices, &stackParams, heapParams](
-                    Status s, const Params& p) {
+                    c2_hidl::Status s, const c2_hidl::Params& p) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK && status != C2_BAD_INDEX) {
                     LOG(DEBUG) << "query -- call failed: "
@@ -311,7 +351,7 @@
                     return;
                 }
                 std::vector<C2Param*> paramPointers;
-                if (!parseParamsBlob(&paramPointers, p)) {
+                if (!c2_hidl::utils::parseParamsBlob(&paramPointers, p)) {
                     LOG(ERROR) << "query -- error while parsing params.";
                     status = C2_CORRUPTED;
                     return;
@@ -371,12 +411,12 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::config(
+c2_status_t Codec2ConfigurableClient::HidlImpl::config(
         const std::vector<C2Param*> &params,
         c2_blocking_t mayBlock,
         std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
-    Params hidlParams;
-    if (!createParamsBlob(&hidlParams, params)) {
+    c2_hidl::Params hidlParams;
+    if (!c2_hidl::utils::createParamsBlob(&hidlParams, params)) {
         LOG(ERROR) << "config -- bad input.";
         return C2_TRANSACTION_FAILED;
     }
@@ -385,9 +425,9 @@
             hidlParams,
             mayBlock == C2_MAY_BLOCK,
             [&status, &params, failures](
-                    Status s,
-                    const hidl_vec<SettingResult> f,
-                    const Params& o) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::SettingResult> f,
+                    const c2_hidl::Params& o) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK && status != C2_BAD_INDEX) {
                     LOG(DEBUG) << "config -- call failed: "
@@ -395,14 +435,14 @@
                 }
                 size_t i = failures->size();
                 failures->resize(i + f.size());
-                for (const SettingResult& sf : f) {
-                    if (!objcpy(&(*failures)[i++], sf)) {
+                for (const c2_hidl::SettingResult& sf : f) {
+                    if (!c2_hidl::utils::objcpy(&(*failures)[i++], sf)) {
                         LOG(ERROR) << "config -- "
                                    << "invalid SettingResult returned.";
                         return;
                     }
                 }
-                if (!updateParamsFromBlob(params, o)) {
+                if (!c2_hidl::utils::updateParamsFromBlob(params, o)) {
                     LOG(ERROR) << "config -- "
                                << "failed to parse returned params.";
                     status = C2_CORRUPTED;
@@ -415,7 +455,7 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::querySupportedParams(
+c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedParams(
         std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
     // TODO: Cache and query properly!
     c2_status_t status;
@@ -423,8 +463,8 @@
             std::numeric_limits<uint32_t>::min(),
             std::numeric_limits<uint32_t>::max(),
             [&status, params](
-                    Status s,
-                    const hidl_vec<ParamDescriptor>& p) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::ParamDescriptor>& p) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "querySupportedParams -- call failed: "
@@ -433,8 +473,8 @@
                 }
                 size_t i = params->size();
                 params->resize(i + p.size());
-                for (const ParamDescriptor& sp : p) {
-                    if (!objcpy(&(*params)[i++], sp)) {
+                for (const c2_hidl::ParamDescriptor& sp : p) {
+                    if (!c2_hidl::utils::objcpy(&(*params)[i++], sp)) {
                         LOG(ERROR) << "querySupportedParams -- "
                                    << "invalid returned ParamDescriptor.";
                         return;
@@ -448,12 +488,12 @@
     return status;
 }
 
-c2_status_t Codec2ConfigurableClient::querySupportedValues(
+c2_status_t Codec2ConfigurableClient::HidlImpl::querySupportedValues(
         std::vector<C2FieldSupportedValuesQuery>& fields,
         c2_blocking_t mayBlock) const {
-    hidl_vec<FieldSupportedValuesQuery> inFields(fields.size());
+    hidl_vec<c2_hidl::FieldSupportedValuesQuery> inFields(fields.size());
     for (size_t i = 0; i < fields.size(); ++i) {
-        if (!objcpy(&inFields[i], fields[i])) {
+        if (!c2_hidl::utils::objcpy(&inFields[i], fields[i])) {
             LOG(ERROR) << "querySupportedValues -- bad input";
             return C2_TRANSACTION_FAILED;
         }
@@ -464,8 +504,8 @@
             inFields,
             mayBlock == C2_MAY_BLOCK,
             [&status, &inFields, &fields](
-                    Status s,
-                    const hidl_vec<FieldSupportedValuesQueryResult>& r) {
+                    c2_hidl::Status s,
+                    const hidl_vec<c2_hidl::FieldSupportedValuesQueryResult>& r) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "querySupportedValues -- call failed: "
@@ -480,7 +520,7 @@
                     return;
                 }
                 for (size_t i = 0; i < fields.size(); ++i) {
-                    if (!objcpy(&fields[i], inFields[i], r[i])) {
+                    if (!c2_hidl::utils::objcpy(&fields[i], inFields[i], r[i])) {
                         LOG(ERROR) << "querySupportedValues -- "
                                       "invalid returned value.";
                         status = C2_CORRUPTED;
@@ -495,14 +535,135 @@
     return status;
 }
 
+// Codec2ConfigurableClient::AidlImpl
+
+struct Codec2ConfigurableClient::AidlImpl : public Codec2ConfigurableClient::ImplBase {
+    typedef c2_aidl::IConfigurable Base;
+
+    // base cannot be null.
+    explicit AidlImpl(const std::shared_ptr<Base>& base);
+
+    const C2String& getName() const override {
+        return mName;
+    }
+
+    c2_status_t query(
+            const std::vector<C2Param*>& stackParams,
+            const std::vector<C2Param::Index> &heapParamIndices,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2Param>>* const heapParams) const override;
+
+    c2_status_t config(
+            const std::vector<C2Param*> &params,
+            c2_blocking_t mayBlock,
+            std::vector<std::unique_ptr<C2SettingResult>>* const failures) override;
+
+    c2_status_t querySupportedParams(
+            std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+            ) const override;
+
+    c2_status_t querySupportedValues(
+            std::vector<C2FieldSupportedValuesQuery>& fields,
+            c2_blocking_t mayBlock) const override;
+
+private:
+    std::shared_ptr<Base> mBase;
+    const C2String mName;
+};
+
+Codec2ConfigurableClient::AidlImpl::AidlImpl(const std::shared_ptr<Base>& base)
+      : mBase{base},
+        mName{[base]() -> C2String {
+                std::string outName;
+                ndk::ScopedAStatus status = base->getName(&outName);
+                return status.isOk() ? outName : "";
+            }()} {
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::query(
+        const std::vector<C2Param*> &stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    (void)stackParams, (void)heapParamIndices, (void)mayBlock, (void)heapParams;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::config(
+        const std::vector<C2Param*> &params,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+    (void)params, (void)mayBlock, (void)failures;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedParams(
+        std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+    (void)params;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+c2_status_t Codec2ConfigurableClient::AidlImpl::querySupportedValues(
+        std::vector<C2FieldSupportedValuesQuery>& fields,
+        c2_blocking_t mayBlock) const {
+    (void)fields, (void)mayBlock;
+    // TODO: implementation
+    return C2_OMITTED;
+}
+
+// Codec2ConfigurableClient
+
+Codec2ConfigurableClient::Codec2ConfigurableClient(const sp<HidlBase> &hidlBase)
+    : mImpl(new Codec2ConfigurableClient::HidlImpl(hidlBase)) {
+}
+
+Codec2ConfigurableClient::Codec2ConfigurableClient(
+        const std::shared_ptr<AidlBase> &aidlBase)
+    : mImpl(new Codec2ConfigurableClient::AidlImpl(aidlBase)) {
+}
+
+const C2String& Codec2ConfigurableClient::getName() const {
+    return mImpl->getName();
+}
+
+c2_status_t Codec2ConfigurableClient::query(
+        const std::vector<C2Param*>& stackParams,
+        const std::vector<C2Param::Index> &heapParamIndices,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2Param>>* const heapParams) const {
+    return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams);
+}
+
+c2_status_t Codec2ConfigurableClient::config(
+        const std::vector<C2Param*> &params,
+        c2_blocking_t mayBlock,
+        std::vector<std::unique_ptr<C2SettingResult>>* const failures) {
+    return mImpl->config(params, mayBlock, failures);
+}
+
+c2_status_t Codec2ConfigurableClient::querySupportedParams(
+        std::vector<std::shared_ptr<C2ParamDescriptor>>* const params) const {
+    return mImpl->querySupportedParams(params);
+}
+
+c2_status_t Codec2ConfigurableClient::querySupportedValues(
+        std::vector<C2FieldSupportedValuesQuery>& fields,
+        c2_blocking_t mayBlock) const {
+    return mImpl->querySupportedValues(fields, mayBlock);
+}
+
+
 // Codec2Client::Component::HidlListener
-struct Codec2Client::Component::HidlListener : public IComponentListener {
+struct Codec2Client::Component::HidlListener : public c2_hidl::IComponentListener {
     std::weak_ptr<Component> component;
     std::weak_ptr<Listener> base;
 
-    virtual Return<void> onWorkDone(const WorkBundle& workBundle) override {
+    virtual Return<void> onWorkDone(const c2_hidl::WorkBundle& workBundle) override {
         std::list<std::unique_ptr<C2Work>> workItems;
-        if (!objcpy(&workItems, workBundle)) {
+        if (!c2_hidl::utils::objcpy(&workItems, workBundle)) {
             LOG(DEBUG) << "onWorkDone -- received corrupted WorkBundle.";
             return Void();
         }
@@ -521,12 +682,12 @@
     }
 
     virtual Return<void> onTripped(
-            const hidl_vec<SettingResult>& settingResults) override {
+            const hidl_vec<c2_hidl::SettingResult>& settingResults) override {
         std::vector<std::shared_ptr<C2SettingResult>> c2SettingResults(
                 settingResults.size());
         for (size_t i = 0; i < settingResults.size(); ++i) {
             std::unique_ptr<C2SettingResult> c2SettingResult;
-            if (!objcpy(&c2SettingResult, settingResults[i])) {
+            if (!c2_hidl::utils::objcpy(&c2SettingResult, settingResults[i])) {
                 LOG(DEBUG) << "onTripped -- received corrupted SettingResult.";
                 return Void();
             }
@@ -540,13 +701,13 @@
         return Void();
     }
 
-    virtual Return<void> onError(Status s, uint32_t errorCode) override {
+    virtual Return<void> onError(c2_hidl::Status s, uint32_t errorCode) override {
         LOG(DEBUG) << "onError --"
                    << " status = " << s
                    << ", errorCode = " << errorCode
                    << ".";
         if (std::shared_ptr<Listener> listener = base.lock()) {
-            listener->onError(component, s == Status::OK ?
+            listener->onError(component, s == c2_hidl::Status::OK ?
                     errorCode : static_cast<c2_status_t>(s));
         } else {
             LOG(DEBUG) << "onError -- listener died.";
@@ -610,18 +771,18 @@
 
 // Codec2Client
 Codec2Client::Codec2Client(sp<Base> const& base,
-                           sp<IConfigurable> const& configurable,
+                           sp<c2_hidl::IConfigurable> const& configurable,
                            size_t serviceIndex)
       : Configurable{configurable},
         mBase1_0{base},
         mBase1_1{Base1_1::castFrom(base)},
         mBase1_2{Base1_2::castFrom(base)},
         mServiceIndex{serviceIndex} {
-    Return<sp<IClientManager>> transResult = base->getPoolClientManager();
+    Return<sp<bufferpool_hidl::IClientManager>> transResult = base->getPoolClientManager();
     if (!transResult.isOk()) {
         LOG(ERROR) << "getPoolClientManager -- transaction failed.";
     } else {
-        mHostPoolManager = static_cast<sp<IClientManager>>(transResult);
+        mHostPoolManager = static_cast<sp<bufferpool_hidl::IClientManager>>(transResult);
     }
 }
 
@@ -658,10 +819,10 @@
         transStatus = mBase1_2->createComponent_1_2(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -674,10 +835,10 @@
         transStatus = mBase1_1->createComponent_1_1(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl_base::V1_1::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -689,10 +850,10 @@
         transStatus = mBase1_0->createComponent(
             name,
             hidlListener,
-            ClientManager::getInstance(),
+            bufferpool_hidl::implementation::ClientManager::getInstance(),
             [&status, component, hidlListener](
-                    Status s,
-                    const sp<hardware::media::c2::V1_0::IComponent>& c) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl_base::V1_0::IComponent>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -740,8 +901,8 @@
     Return<void> transStatus = mBase1_0->createInterface(
             name,
             [&status, interface](
-                    Status s,
-                    const sp<IComponentInterface>& i) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IComponentInterface>& i) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -771,8 +932,8 @@
     c2_status_t status;
     Return<void> transStatus = mBase1_0->createInputSurface(
             [&status, inputSurface](
-                    Status s,
-                    const sp<IInputSurface>& i) {
+                    c2_hidl::Status s,
+                    const sp<c2_hidl::IInputSurface>& i) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     return;
@@ -798,16 +959,16 @@
     std::vector<C2Component::Traits> traits;
     std::string const& serviceName = getServiceName();
     Return<void> transStatus = mBase1_0->listComponents(
-            [&traits, &serviceName](Status s,
-                   const hidl_vec<IComponentStore::ComponentTraits>& t) {
-                if (s != Status::OK) {
+            [&traits, &serviceName](c2_hidl::Status s,
+                   const hidl_vec<c2_hidl::IComponentStore::ComponentTraits>& t) {
+                if (s != c2_hidl::Status::OK) {
                     LOG(DEBUG) << "_listComponents -- call failed: "
                                << static_cast<c2_status_t>(s) << ".";
                     return;
                 }
                 traits.resize(t.size());
                 for (size_t i = 0; i < t.size(); ++i) {
-                    if (!objcpy(&traits[i], t[i])) {
+                    if (!c2_hidl::utils::objcpy(&traits[i], t[i])) {
                         LOG(ERROR) << "_listComponents -- corrupted output.";
                         return;
                     }
@@ -839,14 +1000,14 @@
     // should reflect the HAL API.
     struct SimpleParamReflector : public C2ParamReflector {
         virtual std::unique_ptr<C2StructDescriptor> describe(C2Param::CoreIndex coreIndex) const {
-            hidl_vec<ParamIndex> indices(1);
-            indices[0] = static_cast<ParamIndex>(coreIndex.coreIndex());
+            hidl_vec<c2_hidl::ParamIndex> indices(1);
+            indices[0] = static_cast<c2_hidl::ParamIndex>(coreIndex.coreIndex());
             std::unique_ptr<C2StructDescriptor> descriptor;
             Return<void> transStatus = mBase->getStructDescriptors(
                     indices,
                     [&descriptor](
-                            Status s,
-                            const hidl_vec<StructDescriptor>& sd) {
+                            c2_hidl::Status s,
+                            const hidl_vec<c2_hidl::StructDescriptor>& sd) {
                         c2_status_t status = static_cast<c2_status_t>(s);
                         if (status != C2_OK) {
                             LOG(DEBUG) << "SimpleParamReflector -- "
@@ -864,7 +1025,7 @@
                             descriptor.reset();
                             return;
                         }
-                        if (!objcpy(&descriptor, sd[0])) {
+                        if (!c2_hidl::utils::objcpy(&descriptor, sd[0])) {
                             LOG(DEBUG) << "SimpleParamReflector -- "
                                           "getStructDescriptors() returned "
                                           "corrupted data.";
@@ -1196,11 +1357,11 @@
 // Codec2Client::Interface
 Codec2Client::Interface::Interface(const sp<Base>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1210,17 +1371,17 @@
 // Codec2Client::Component
 Codec2Client::Component::Component(const sp<Base>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1233,17 +1394,17 @@
 
 Codec2Client::Component::Component(const sp<Base1_1>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1256,17 +1417,17 @@
 
 Codec2Client::Component::Component(const sp<Base1_2>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IComponentInterface>> transResult1 =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IComponentInterface>> transResult1 =
                         base->getInterface();
                 if (!transResult1.isOk()) {
                     return nullptr;
                 }
-                Return<sp<IConfigurable>> transResult2 =
-                        static_cast<sp<IComponentInterface>>(transResult1)->
+                Return<sp<c2_hidl::IConfigurable>> transResult2 =
+                        static_cast<sp<c2_hidl::IComponentInterface>>(transResult1)->
                         getConfigurable();
                 return transResult2.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult2) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult2) :
                         nullptr;
             }()
         },
@@ -1288,9 +1449,9 @@
     Return<void> transStatus = mBase1_0->createBlockPool(
             static_cast<uint32_t>(id),
             [&status, blockPoolId, configurable](
-                    Status s,
+                    c2_hidl::Status s,
                     uint64_t pId,
-                    const sp<IConfigurable>& c) {
+                    const sp<c2_hidl::IConfigurable>& c) {
                 status = static_cast<c2_status_t>(s);
                 configurable->reset();
                 if (status != C2_OK) {
@@ -1310,13 +1471,13 @@
 
 c2_status_t Codec2Client::Component::destroyBlockPool(
         C2BlockPool::local_id_t localId) {
-    Return<Status> transResult = mBase1_0->destroyBlockPool(
+    Return<c2_hidl::Status> transResult = mBase1_0->destroyBlockPool(
             static_cast<uint64_t>(localId));
     if (!transResult.isOk()) {
         LOG(ERROR) << "destroyBlockPool -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
-    return static_cast<c2_status_t>(static_cast<Status>(transResult));
+    return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
 }
 
 void Codec2Client::Component::handleOnWorkDone(
@@ -1327,18 +1488,18 @@
 
 c2_status_t Codec2Client::Component::queue(
         std::list<std::unique_ptr<C2Work>>* const items) {
-    WorkBundle workBundle;
+    c2_hidl::WorkBundle workBundle;
     if (!objcpy(&workBundle, *items, mBufferPoolSender.get())) {
         LOG(ERROR) << "queue -- bad input.";
         return C2_TRANSACTION_FAILED;
     }
-    Return<Status> transStatus = mBase1_0->queue(workBundle);
+    Return<c2_hidl::Status> transStatus = mBase1_0->queue(workBundle);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "queue -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "queue -- call failed: " << status << ".";
     }
@@ -1352,13 +1513,13 @@
     c2_status_t status;
     Return<void> transStatus = mBase1_0->flush(
             [&status, flushedWork](
-                    Status s, const WorkBundle& wb) {
+                    c2_hidl::Status s, const c2_hidl::WorkBundle& wb) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "flush -- call failed: " << status << ".";
                     return;
                 }
-                if (!objcpy(flushedWork, wb)) {
+                if (!c2_hidl::utils::objcpy(flushedWork, wb)) {
                     status = C2_CORRUPTED;
                 } else {
                     status = C2_OK;
@@ -1391,14 +1552,14 @@
 }
 
 c2_status_t Codec2Client::Component::drain(C2Component::drain_mode_t mode) {
-    Return<Status> transStatus = mBase1_0->drain(
+    Return<c2_hidl::Status> transStatus = mBase1_0->drain(
             mode == C2Component::DRAIN_COMPONENT_WITH_EOS);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "drain -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "drain -- call failed: " << status << ".";
     }
@@ -1406,13 +1567,13 @@
 }
 
 c2_status_t Codec2Client::Component::start() {
-    Return<Status> transStatus = mBase1_0->start();
+    Return<c2_hidl::Status> transStatus = mBase1_0->start();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "start -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "start -- call failed: " << status << ".";
     }
@@ -1420,13 +1581,13 @@
 }
 
 c2_status_t Codec2Client::Component::stop() {
-    Return<Status> transStatus = mBase1_0->stop();
+    Return<c2_hidl::Status> transStatus = mBase1_0->stop();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "stop -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "stop -- call failed: " << status << ".";
     }
@@ -1434,13 +1595,13 @@
 }
 
 c2_status_t Codec2Client::Component::reset() {
-    Return<Status> transStatus = mBase1_0->reset();
+    Return<c2_hidl::Status> transStatus = mBase1_0->reset();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "reset -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "reset -- call failed: " << status << ".";
     }
@@ -1448,13 +1609,13 @@
 }
 
 c2_status_t Codec2Client::Component::release() {
-    Return<Status> transStatus = mBase1_0->release();
+    Return<c2_hidl::Status> transStatus = mBase1_0->release();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "release -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "release -- call failed: " << status << ".";
     }
@@ -1471,7 +1632,7 @@
     c2_status_t status{};
     Return<void> transStatus = mBase1_1->configureVideoTunnel(avSyncHwId,
             [&status, sidebandHandle](
-                    Status s, hardware::hidl_handle const& h) {
+                    c2_hidl::Status s, hardware::hidl_handle const& h) {
                 status = static_cast<c2_status_t>(s);
                 if (h.getNativeHandle()) {
                     *sidebandHandle = native_handle_clone(h.getNativeHandle());
@@ -1550,7 +1711,7 @@
     ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
             generation, (long long)consumerUsage, syncObj ? " sync" : "");
 
-    Return<Status> transStatus = syncObj ?
+    Return<c2_hidl::Status> transStatus = syncObj ?
             mBase1_2->setOutputSurfaceWithSyncObj(
                     static_cast<uint64_t>(blockPoolId),
                     bqId == 0 ? nullHgbp : igbp, *syncObj) :
@@ -1565,7 +1726,7 @@
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "setOutputSurface -- call failed: " << status << ".";
     }
@@ -1577,6 +1738,7 @@
         const C2ConstGraphicBlock& block,
         const QueueBufferInput& input,
         QueueBufferOutput* output) {
+    ScopedTrace trace(ATRACE_TAG,"Codec2Client::Component::queueToOutputSurface");
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
@@ -1593,13 +1755,13 @@
         C2BlockPool::local_id_t blockPoolId) {
     std::scoped_lock lock(mOutputMutex);
     mOutputBufferQueue->stop();
-    Return<Status> transStatus = mBase1_0->setOutputSurface(
+    Return<c2_hidl::Status> transStatus = mBase1_0->setOutputSurface(
             static_cast<uint64_t>(blockPoolId), nullptr);
     if (!transStatus.isOk()) {
         LOG(ERROR) << "setOutputSurface(stopUsingOutputSurface) -- transaction failed.";
     } else {
         c2_status_t status =
-                static_cast<c2_status_t>(static_cast<Status>(transStatus));
+                static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
         if (status != C2_OK) {
             LOG(DEBUG) << "setOutputSurface(stopUsingOutputSurface) -- call failed: "
                        << status << ".";
@@ -1615,7 +1777,7 @@
     Return<void> transStatus = mBase1_0->connectToInputSurface(
             inputSurface->mBase,
             [&status, connection](
-                    Status s, const sp<IInputSurfaceConnection>& c) {
+                    c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "connectToInputSurface -- call failed: "
@@ -1639,7 +1801,7 @@
     Return<void> transStatus = mBase1_0->connectToOmxInputSurface(
             producer, source,
             [&status, connection](
-                    Status s, const sp<IInputSurfaceConnection>& c) {
+                    c2_hidl::Status s, const sp<c2_hidl::IInputSurfaceConnection>& c) {
                 status = static_cast<c2_status_t>(s);
                 if (status != C2_OK) {
                     LOG(DEBUG) << "connectToOmxInputSurface -- call failed: "
@@ -1656,13 +1818,13 @@
 }
 
 c2_status_t Codec2Client::Component::disconnectFromInputSurface() {
-    Return<Status> transStatus = mBase1_0->disconnectFromInputSurface();
+    Return<c2_hidl::Status> transStatus = mBase1_0->disconnectFromInputSurface();
     if (!transStatus.isOk()) {
         LOG(ERROR) << "disconnectToInputSurface -- transaction failed.";
         return C2_TRANSACTION_FAILED;
     }
     c2_status_t status =
-            static_cast<c2_status_t>(static_cast<Status>(transStatus));
+            static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transStatus));
     if (status != C2_OK) {
         LOG(DEBUG) << "disconnectFromInputSurface -- call failed: "
                    << status << ".";
@@ -1709,13 +1871,13 @@
 }
 
 // Codec2Client::InputSurface
-Codec2Client::InputSurface::InputSurface(const sp<IInputSurface>& base)
+Codec2Client::InputSurface::InputSurface(const sp<c2_hidl::IInputSurface>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1735,19 +1897,19 @@
     return mGraphicBufferProducer;
 }
 
-sp<IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
+sp<c2_hidl::IInputSurface> Codec2Client::InputSurface::getHalInterface() const {
     return mBase;
 }
 
 // Codec2Client::InputSurfaceConnection
 Codec2Client::InputSurfaceConnection::InputSurfaceConnection(
-        const sp<IInputSurfaceConnection>& base)
+        const sp<c2_hidl::IInputSurfaceConnection>& base)
       : Configurable{
-            [base]() -> sp<IConfigurable> {
-                Return<sp<IConfigurable>> transResult =
+            [base]() -> sp<c2_hidl::IConfigurable> {
+                Return<sp<c2_hidl::IConfigurable>> transResult =
                         base->getConfigurable();
                 return transResult.isOk() ?
-                        static_cast<sp<IConfigurable>>(transResult) :
+                        static_cast<sp<c2_hidl::IConfigurable>>(transResult) :
                         nullptr;
             }()
         },
@@ -1755,8 +1917,8 @@
 }
 
 c2_status_t Codec2Client::InputSurfaceConnection::disconnect() {
-    Return<Status> transResult = mBase->disconnect();
-    return static_cast<c2_status_t>(static_cast<Status>(transResult));
+    Return<c2_hidl::Status> transResult = mBase->disconnect();
+    return static_cast<c2_status_t>(static_cast<c2_hidl::Status>(transResult));
 }
 
 }  // namespace android
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 5267394..6fa19b0 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -84,6 +84,13 @@
 struct IComponentStore;
 }  // namespace android::hardware::media::c2::V1_2
 
+namespace aidl::android::hardware::media::c2 {
+class IComponent;
+class IComponentInterface;
+class IComponentStore;
+class IConfigurable;
+}  // namespace aidl::android::hardware::media::c2
+
 namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
 }  // namespace android::hardware::media::bufferpool::V2_0
@@ -106,7 +113,36 @@
 // declaration of an inner class is not possible.
 struct Codec2ConfigurableClient {
 
-    typedef ::android::hardware::media::c2::V1_0::IConfigurable Base;
+    typedef ::android::hardware::media::c2::V1_0::IConfigurable HidlBase;
+    typedef ::aidl::android::hardware::media::c2::IConfigurable AidlBase;
+
+    struct ImplBase {
+        virtual ~ImplBase() = default;
+
+        virtual const C2String& getName() const = 0;
+
+        virtual c2_status_t query(
+                const std::vector<C2Param*>& stackParams,
+                const std::vector<C2Param::Index> &heapParamIndices,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2Param>>* const heapParams) const = 0;
+
+        virtual c2_status_t config(
+                const std::vector<C2Param*> &params,
+                c2_blocking_t mayBlock,
+                std::vector<std::unique_ptr<C2SettingResult>>* const failures) = 0;
+
+        virtual c2_status_t querySupportedParams(
+                std::vector<std::shared_ptr<C2ParamDescriptor>>* const params
+                ) const = 0;
+
+        virtual c2_status_t querySupportedValues(
+                std::vector<C2FieldSupportedValuesQuery>& fields,
+                c2_blocking_t mayBlock) const = 0;
+    };
+
+    explicit Codec2ConfigurableClient(const sp<HidlBase> &hidlBase);
+    explicit Codec2ConfigurableClient(const std::shared_ptr<AidlBase> &aidlBase);
 
     const C2String& getName() const;
 
@@ -128,15 +164,11 @@
     c2_status_t querySupportedValues(
             std::vector<C2FieldSupportedValuesQuery>& fields,
             c2_blocking_t mayBlock) const;
+private:
+    struct HidlImpl;
+    struct AidlImpl;
 
-    // base cannot be null.
-    Codec2ConfigurableClient(const sp<Base>& base);
-
-protected:
-    sp<Base> mBase;
-    C2String mName;
-
-    friend struct Codec2Client;
+    const std::unique_ptr<ImplBase> mImpl;
 };
 
 struct Codec2Client : public Codec2ConfigurableClient {
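Note: the header change above splits Codec2ConfigurableClient into an abstract ImplBase with transport-specific HidlImpl and AidlImpl backends, chosen by whichever constructor is used. A minimal sketch of that same pimpl-style dispatch pattern, using hypothetical simplified names (Configurable, HidlConfigurable, AidlConfigurable) rather than the real Codec2 classes:

    // Sketch only: hypothetical names, not the actual Codec2 classes.
    #include <memory>
    #include <string>
    #include <utility>

    struct Configurable {
        struct ImplBase {
            virtual ~ImplBase() = default;
            virtual const std::string& getName() const = 0;
        };

        // Both transports funnel into the same mImpl; callers only see Configurable.
        explicit Configurable(std::unique_ptr<ImplBase> impl) : mImpl(std::move(impl)) {}

        const std::string& getName() const { return mImpl->getName(); }

    private:
        const std::unique_ptr<ImplBase> mImpl;
    };

    // HIDL-backed implementation (fully functional in this patch).
    struct HidlConfigurable : Configurable::ImplBase {
        explicit HidlConfigurable(std::string name) : mName(std::move(name)) {}
        const std::string& getName() const override { return mName; }
    private:
        const std::string mName;
    };

    // AIDL-backed implementation (stubbed out in this patch, returning C2_OMITTED).
    struct AidlConfigurable : Configurable::ImplBase {
        const std::string& getName() const override { return mName; }
    private:
        const std::string mName{"aidl-stub"};
    };

Callers keep using the existing public Codec2ConfigurableClient methods; only the constructor decides which backend gets instantiated.
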
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 4eebd1c..2f9773e 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "Codec2-OutputBufferQueue"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android-base/logging.h>
+#include <utils/Trace.h>
 
 #include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <codec2/hidl/output.h>
@@ -388,6 +390,7 @@
     uint32_t generation;
     uint64_t bqId;
     int32_t bqSlot;
+    ScopedTrace trace(ATRACE_TAG,"Codec2-OutputBufferQueue::outputBuffer");
     bool display = V1_0::utils::displayBufferQueueBlock(block);
     if (!getBufferQueueAssignment(block, &generation, &bqId, &bqSlot) ||
         bqId == 0) {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index a75ce70..1153fb6 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2137,7 +2137,7 @@
     }
 
     std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
-    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+    status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers, true);
     if (err != OK) {
         if (err == NO_MEMORY) {
             // NO_MEMORY happens here when all the buffers are still
@@ -2160,7 +2160,6 @@
         const std::unique_ptr<Config> &config = *configLocked;
         return config->mBuffersBoundToCodec;
     }());
-
     {
         Mutexed<State>::Locked state(mState);
         if (state->get() != RESUMING) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 29afcee..7665d6b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -25,6 +25,8 @@
 #include <atomic>
 #include <list>
 #include <numeric>
+#include <thread>
+#include <chrono>
 
 #include <C2AllocatorGralloc.h>
 #include <C2PlatformSupport.h>
@@ -598,6 +600,8 @@
     size_t bufferSize = 0;
     c2_status_t blockRes = C2_OK;
     bool copied = false;
+    ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+            "CCodecBufferChannel::decrypt(%s)", mName).c_str());
     if (mSendEncryptedInfoBuffer) {
         static const C2MemoryUsage kDefaultReadWriteUsage{
             C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
@@ -1615,22 +1619,31 @@
 }
 
 status_t CCodecBufferChannel::prepareInitialInputBuffers(
-        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers) {
+        std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers, bool retry) {
     if (mInputSurface) {
         return OK;
     }
 
     size_t numInputSlots = mInput.lock()->numSlots;
-
-    {
-        Mutexed<Input>::Locked input(mInput);
-        while (clientInputBuffers->size() < numInputSlots) {
-            size_t index;
-            sp<MediaCodecBuffer> buffer;
-            if (!input->buffers->requestNewBuffer(&index, &buffer)) {
-                break;
+    int retryCount = 1;
+    for (; clientInputBuffers->empty() && retryCount >= 0; retryCount--) {
+        {
+            Mutexed<Input>::Locked input(mInput);
+            while (clientInputBuffers->size() < numInputSlots) {
+                size_t index;
+                sp<MediaCodecBuffer> buffer;
+                if (!input->buffers->requestNewBuffer(&index, &buffer)) {
+                    break;
+                }
+                clientInputBuffers->emplace(index, buffer);
             }
-            clientInputBuffers->emplace(index, buffer);
+        }
+        if (!retry || (retryCount <= 0)) {
+            break;
+        }
+        if (clientInputBuffers->empty()) {
+            // Wait: a buffer may still be in transit from the component.
+            std::this_thread::sleep_for(std::chrono::milliseconds(4));
         }
     }
     if (clientInputBuffers->empty()) {
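Note: prepareInitialInputBuffers() above now retries once, sleeping briefly, when no input buffer is immediately available. The same bounded retry shape, reduced to a hedged stand-alone sketch (acquireWithRetry() and tryAcquire() are placeholders, not the real channel API):

    // Sketch only: tryAcquire() stands in for requestNewBuffer(); values are illustrative.
    #include <chrono>
    #include <functional>
    #include <thread>

    bool acquireWithRetry(const std::function<bool()>& tryAcquire, bool retry) {
        int retryCount = 1;  // one extra attempt, mirroring the patch
        for (; retryCount >= 0; retryCount--) {
            if (tryAcquire()) {
                return true;
            }
            if (!retry || retryCount <= 0) {
                break;
            }
            // The resource may still be in transit; give it a moment before retrying.
            std::this_thread::sleep_for(std::chrono::milliseconds(4));
        }
        return false;
    }
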
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 41f5ae2..f60b6fa 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -140,7 +140,8 @@
      *                                  initial input buffers.
      */
     status_t prepareInitialInputBuffers(
-            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers);
+            std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers,
+            bool retry = false);
 
     /**
      * Request initial input buffers as prepared in clientInputBuffers.
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index f272499..0803dc3 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -23,6 +23,7 @@
 #include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
 #include <android/hardware/graphics/common/1.2/types.h>
 #include <cutils/native_handle.h>
+#include <drm/drm_fourcc.h>
 #include <gralloctypes/Gralloc4.h>
 #include <hardware/gralloc.h>
 #include <ui/GraphicBufferAllocator.h>
@@ -478,7 +479,25 @@
     // 'NATIVE' on Android means LITTLE_ENDIAN
     constexpr C2PlaneInfo::endianness_t kEndianness = C2PlaneInfo::NATIVE;
 
-    switch (mFormat) {
+    // Try to resolve the IMPLEMENTATION_DEFINED format to a concrete format if
+    // possible.
+    uint32_t format = mFormat;
+    uint32_t fourCc;
+    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+        !GraphicBufferMapper::get().getPixelFormatFourCC(mBuffer, &fourCc)) {
+        switch (fourCc)  {
+            case DRM_FORMAT_XBGR8888:
+                 format = static_cast<uint32_t>(PixelFormat4::RGBX_8888);
+                 break;
+            case DRM_FORMAT_ABGR8888:
+                 format = static_cast<uint32_t>(PixelFormat4::RGBA_8888);
+                 break;
+            default:
+                 break;
+        }
+    }
+
+    switch (format) {
         case static_cast<uint32_t>(PixelFormat4::RGBA_1010102): {
             // TRICKY: this is used for media as YUV444 in the case when it is queued directly to a
             // Surface. In all other cases it is RGBA. We don't know which case it is here, so
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 018e269..a56a216 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -16,7 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2Buffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <list>
 #include <map>
@@ -33,6 +35,7 @@
 
 namespace {
 
+using android::ScopedTrace;
 using android::C2AllocatorBlob;
 using android::C2AllocatorGralloc;
 using android::C2AllocatorIon;
@@ -1159,6 +1162,7 @@
         uint32_t capacity,
         C2MemoryUsage usage,
         std::shared_ptr<C2LinearBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchLinearBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchLinearBlock(capacity, usage, block);
     }
@@ -1174,6 +1178,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block) {
+    ScopedTrace trace(ATRACE_TAG,"C2PooledBlockPool::fetchGraphicBlock");
     if (mBufferPoolVer == VER_HIDL && mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block);
     }
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 5fb0c8f..960fa79 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -16,8 +16,10 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2BqBuffer"
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include <android/hardware_buffer.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 #include <ui/BufferQueueDefs.h>
 #include <ui/GraphicBuffer.h>
@@ -37,6 +39,7 @@
 #include <map>
 #include <mutex>
 
+using ::android::ScopedTrace;
 using ::android::BufferQueueDefs::NUM_BUFFER_SLOTS;
 using ::android::C2AllocatorGralloc;
 using ::android::C2AndroidMemoryUsage;
@@ -1063,6 +1066,7 @@
         uint32_t format,
         C2MemoryUsage usage,
         std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
+    ScopedTrace trace(ATRACE_TAG,"C2BufferQueueBlockPool::fetchGraphicBlock");
     if (mImpl) {
         return mImpl->fetchGraphicBlock(width, height, format, usage, block, nullptr);
     }
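Note: several of the hunks above add the same lightweight tracing: define ATRACE_TAG before including <utils/Trace.h>, then place an android::ScopedTrace at the top of a hot path so the scope shows up in systrace/Perfetto captures. A minimal sketch of that pattern (decodeOneFrame() is a made-up function name):

    // Sketch of the tracing pattern used in this patch; decodeOneFrame() is hypothetical.
    #define ATRACE_TAG ATRACE_TAG_VIDEO
    #include <utils/Trace.h>

    void decodeOneFrame() {
        // Emits a begin/end trace section covering this function's lifetime.
        android::ScopedTrace trace(ATRACE_TAG, "decodeOneFrame");
        // ... actual work ...
    }
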
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 7648c76..74f7e6b 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -623,6 +623,11 @@
  * (e.g. a USB audio interface, a DAC connected to headphones) to
  * specify allowable configurations of a particular device.
  *
+ * Channel masks are defined for input only, output only, or both input and output.
+ * These channel masks are different from those defined in AudioFormat.java.
+ * If an app gets a channel mask from the Java API and wants to use it with AAudio,
+ * the app must convert the mask itself.
+ *
  * Added in API level 32.
  */
 enum {
@@ -630,10 +635,6 @@
      * Invalid channel mask
      */
     AAUDIO_CHANNEL_INVALID = -1,
-
-    /**
-     * Output audio channel mask
-     */
     AAUDIO_CHANNEL_FRONT_LEFT = 1 << 0,
     AAUDIO_CHANNEL_FRONT_RIGHT = 1 << 1,
     AAUDIO_CHANNEL_FRONT_CENTER = 1 << 2,
@@ -661,62 +662,112 @@
     AAUDIO_CHANNEL_FRONT_WIDE_LEFT = 1 << 24,
     AAUDIO_CHANNEL_FRONT_WIDE_RIGHT = 1 << 25,
 
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_MONO = AAUDIO_CHANNEL_FRONT_LEFT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_STEREO = AAUDIO_CHANNEL_FRONT_LEFT |
                             AAUDIO_CHANNEL_FRONT_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_2POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_TRI = AAUDIO_CHANNEL_FRONT_LEFT |
                          AAUDIO_CHANNEL_FRONT_RIGHT |
                          AAUDIO_CHANNEL_FRONT_CENTER,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_TRI_BACK = AAUDIO_CHANNEL_FRONT_LEFT |
                               AAUDIO_CHANNEL_FRONT_RIGHT |
                               AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_3POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
                              AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_2POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
                                    AAUDIO_CHANNEL_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_2POINT1POINT2 = AAUDIO_CHANNEL_2POINT0POINT2 |
                                    AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_3POINT0POINT2 = AAUDIO_CHANNEL_FRONT_LEFT |
                                    AAUDIO_CHANNEL_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_FRONT_CENTER |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported for Input and Output
+     */
     AAUDIO_CHANNEL_3POINT1POINT2 = AAUDIO_CHANNEL_3POINT0POINT2 |
                                    AAUDIO_CHANNEL_LOW_FREQUENCY,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_QUAD = AAUDIO_CHANNEL_FRONT_LEFT |
                           AAUDIO_CHANNEL_FRONT_RIGHT |
                           AAUDIO_CHANNEL_BACK_LEFT |
                           AAUDIO_CHANNEL_BACK_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_QUAD_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
                                AAUDIO_CHANNEL_FRONT_RIGHT |
                                AAUDIO_CHANNEL_SIDE_LEFT |
                                AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_SURROUND = AAUDIO_CHANNEL_FRONT_LEFT |
                               AAUDIO_CHANNEL_FRONT_RIGHT |
                               AAUDIO_CHANNEL_FRONT_CENTER |
                               AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_PENTA = AAUDIO_CHANNEL_QUAD |
                            AAUDIO_CHANNEL_FRONT_CENTER,
-    // aka 5POINT1_BACK
+    /**
+     * Supported for Input and Output. aka 5POINT1_BACK
+     */
     AAUDIO_CHANNEL_5POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
                              AAUDIO_CHANNEL_LOW_FREQUENCY |
                              AAUDIO_CHANNEL_BACK_LEFT |
                              AAUDIO_CHANNEL_BACK_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_5POINT1_SIDE = AAUDIO_CHANNEL_FRONT_LEFT |
                                   AAUDIO_CHANNEL_FRONT_RIGHT |
                                   AAUDIO_CHANNEL_FRONT_CENTER |
                                   AAUDIO_CHANNEL_LOW_FREQUENCY |
                                   AAUDIO_CHANNEL_SIDE_LEFT |
                                   AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_6POINT1 = AAUDIO_CHANNEL_FRONT_LEFT |
                              AAUDIO_CHANNEL_FRONT_RIGHT |
                              AAUDIO_CHANNEL_FRONT_CENTER |
@@ -724,32 +775,55 @@
                              AAUDIO_CHANNEL_BACK_LEFT |
                              AAUDIO_CHANNEL_BACK_RIGHT |
                              AAUDIO_CHANNEL_BACK_CENTER,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_7POINT1 = AAUDIO_CHANNEL_5POINT1 |
                              AAUDIO_CHANNEL_SIDE_LEFT |
                              AAUDIO_CHANNEL_SIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_5POINT1POINT2 = AAUDIO_CHANNEL_5POINT1 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_5POINT1POINT4 = AAUDIO_CHANNEL_5POINT1 |
                                    AAUDIO_CHANNEL_TOP_FRONT_LEFT |
                                    AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_BACK_LEFT |
                                    AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_7POINT1POINT2 = AAUDIO_CHANNEL_7POINT1 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_7POINT1POINT4 = AAUDIO_CHANNEL_7POINT1 |
                                    AAUDIO_CHANNEL_TOP_FRONT_LEFT |
                                    AAUDIO_CHANNEL_TOP_FRONT_RIGHT |
                                    AAUDIO_CHANNEL_TOP_BACK_LEFT |
                                    AAUDIO_CHANNEL_TOP_BACK_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_9POINT1POINT4 = AAUDIO_CHANNEL_7POINT1POINT4 |
                                    AAUDIO_CHANNEL_FRONT_WIDE_LEFT |
                                    AAUDIO_CHANNEL_FRONT_WIDE_RIGHT,
+    /**
+     * Supported only for Output
+     */
     AAUDIO_CHANNEL_9POINT1POINT6 = AAUDIO_CHANNEL_9POINT1POINT4 |
                                    AAUDIO_CHANNEL_TOP_SIDE_LEFT |
                                    AAUDIO_CHANNEL_TOP_SIDE_RIGHT,
-
+    /**
+     * Supported only for Input
+     */
     AAUDIO_CHANNEL_FRONT_BACK = AAUDIO_CHANNEL_FRONT_CENTER |
                                 AAUDIO_CHANNEL_BACK_CENTER,
 };
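Note: the documentation block added above says that AAudio channel masks differ from the AudioFormat.java masks and that apps must do the conversion themselves. One plausible conversion for positional output masks, assuming AudioFormat.CHANNEL_OUT_FRONT_LEFT == 0x4 while AAUDIO_CHANNEL_FRONT_LEFT == 1 << 0 (verify against the current AudioFormat definitions), is a two-bit shift:

    // Hedged sketch: confirm the 0x4 offset against AudioFormat.java before relying on it.
    #include <cstdint>
    #include <aaudio/AAudio.h>

    // Convert a positional CHANNEL_OUT_* mask from android.media.AudioFormat into an
    // AAudio channel mask. Index masks and input masks need different handling.
    constexpr aaudio_channel_mask_t javaOutMaskToAAudio(uint32_t javaOutMask) {
        // CHANNEL_OUT_FRONT_LEFT is 0x4 in Java; AAUDIO_CHANNEL_FRONT_LEFT is 0x1.
        return static_cast<aaudio_channel_mask_t>(javaOutMask >> 2);
    }

    // Example: Java CHANNEL_OUT_STEREO (0x4 | 0x8 == 0xC) maps to AAUDIO_CHANNEL_STEREO (0x3).
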
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.cpp b/media/libaaudio/src/client/AAudioFlowGraph.cpp
index 5444565..b7e0ae6 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.cpp
+++ b/media/libaaudio/src/client/AAudioFlowGraph.cpp
@@ -39,18 +39,21 @@
 
 aaudio_result_t AAudioFlowGraph::configure(audio_format_t sourceFormat,
                           int32_t sourceChannelCount,
+                          int32_t sourceSampleRate,
                           audio_format_t sinkFormat,
                           int32_t sinkChannelCount,
+                          int32_t sinkSampleRate,
                           bool useMonoBlend,
+                          bool useVolumeRamps,
                           float audioBalance,
-                          bool isExclusive) {
+                          aaudio::resampler::MultiChannelResampler::Quality resamplerQuality) {
     FlowGraphPortFloatOutput *lastOutput = nullptr;
 
-    // TODO change back to ALOGD
-    ALOGI("%s() source format = 0x%08x, channels = %d, sink format = 0x%08x, channels = %d, "
-          "useMonoBlend = %d, audioBalance = %f, isExclusive %d",
-          __func__, sourceFormat, sourceChannelCount, sinkFormat, sinkChannelCount,
-          useMonoBlend, audioBalance, isExclusive);
+    ALOGD("%s() source format = 0x%08x, channels = %d, sample rate = %d, "
+          "sink format = 0x%08x, channels = %d, sample rate = %d, "
+          "useMonoBlend = %d, audioBalance = %f, useVolumeRamps %d",
+          __func__, sourceFormat, sourceChannelCount, sourceSampleRate, sinkFormat,
+          sinkChannelCount, sinkSampleRate, useMonoBlend, audioBalance, useVolumeRamps);
 
     switch (sourceFormat) {
         case AUDIO_FORMAT_PCM_FLOAT:
@@ -85,6 +88,15 @@
         lastOutput = &mLimiter->output;
     }
 
+    if (sourceSampleRate != sinkSampleRate) {
+        mResampler.reset(aaudio::resampler::MultiChannelResampler::make(sourceChannelCount,
+                sourceSampleRate, sinkSampleRate, resamplerQuality));
+        mRateConverter = std::make_unique<SampleRateConverter>(sourceChannelCount,
+                                                               *mResampler);
+        lastOutput->connect(&mRateConverter->input);
+        lastOutput = &mRateConverter->output;
+    }
+
     // Expand the number of channels if required.
     if (sourceChannelCount == 1 && sinkChannelCount > 1) {
         mChannelConverter = std::make_unique<MonoToMultiConverter>(sinkChannelCount);
@@ -95,8 +107,7 @@
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
 
-    // Apply volume ramps for only exclusive streams.
-    if (isExclusive) {
+    if (useVolumeRamps) {
         // Apply volume ramps to set the left/right audio balance and target volumes.
         // The signals will be decoupled, volume ramps will be applied, before the signals are
         // combined again.
@@ -137,9 +148,14 @@
     return AAUDIO_OK;
 }
 
-void AAudioFlowGraph::process(const void *source, void *destination, int32_t numFrames) {
-    mSource->setData(source, numFrames);
-    mSink->read(destination, numFrames);
+int32_t AAudioFlowGraph::pull(void *destination, int32_t targetFramesToRead) {
+    return mSink->read(destination, targetFramesToRead);
+}
+
+int32_t AAudioFlowGraph::process(const void *source, int32_t numFramesToWrite, void *destination,
+                    int32_t targetFramesToRead) {
+    mSource->setData(source, numFramesToWrite);
+    return mSink->read(destination, targetFramesToRead);
 }
 
 /**
diff --git a/media/libaaudio/src/client/AAudioFlowGraph.h b/media/libaaudio/src/client/AAudioFlowGraph.h
index 35fef37..e1d517e 100644
--- a/media/libaaudio/src/client/AAudioFlowGraph.h
+++ b/media/libaaudio/src/client/AAudioFlowGraph.h
@@ -30,6 +30,7 @@
 #include <flowgraph/MonoToMultiConverter.h>
 #include <flowgraph/MultiToManyConverter.h>
 #include <flowgraph/RampLinear.h>
+#include <flowgraph/SampleRateConverter.h>
 
 class AAudioFlowGraph {
 public:
@@ -38,23 +39,57 @@
      *
      * @param sourceFormat
      * @param sourceChannelCount
+     * @param sourceSampleRate
      * @param sinkFormat
      * @param sinkChannelCount
+     * @param sinkSampleRate
      * @param useMonoBlend
+     * @param useVolumeRamps
      * @param audioBalance
-     * @param channelMask
-     * @param isExclusive
+     * @param resamplerQuality
      * @return
      */
     aaudio_result_t configure(audio_format_t sourceFormat,
                               int32_t sourceChannelCount,
+                              int32_t sourceSampleRate,
                               audio_format_t sinkFormat,
                               int32_t sinkChannelCount,
+                              int32_t sinkSampleRate,
                               bool useMonoBlend,
+                              bool useVolumeRamps,
                               float audioBalance,
-                              bool isExclusive);
+                              aaudio::resampler::MultiChannelResampler::Quality resamplerQuality);
 
-    void process(const void *source, void *destination, int32_t numFrames);
+    /**
+     * Attempt to read up to targetFramesToRead frames from the flowgraph.
+     * This function returns the number of frames actually read.
+     *
+     * This function does nothing if process() has not been called yet.
+     *
+     * @param destination
+     * @param targetFramesToRead
+     * @return numFramesRead
+     */
+    int32_t pull(void *destination, int32_t targetFramesToRead);
+
+    /**
+     * Feed numFramesToWrite frames from the source into the flowgraph.
+     * Then attempt to read up to targetFramesToRead frames from the flowgraph.
+     * This function returns the number of frames actually read.
+     *
+     * There may still be data in the flowgraph if targetFramesToRead is not large enough.
+     * Before calling process() again, pull() must be called until all of the data is consumed.
+     *
+     * TODO: b/289510598 - Calculate the exact number of input frames needed for Y output frames.
+     *
+     * @param source
+     * @param numFramesToWrite
+     * @param destination
+     * @param targetFramesToRead
+     * @return numFramesRead
+     */
+    int32_t process(const void *source, int32_t numFramesToWrite, void *destination,
+                    int32_t targetFramesToRead);
 
     /**
      * @param volume between 0.0 and 1.0
@@ -73,6 +108,8 @@
 
 private:
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::FlowGraphSourceBuffered> mSource;
+    std::unique_ptr<RESAMPLER_OUTER_NAMESPACE::resampler::MultiChannelResampler> mResampler;
+    std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::SampleRateConverter> mRateConverter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoBlend> mMonoBlend;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::Limiter> mLimiter;
     std::unique_ptr<FLOWGRAPH_OUTER_NAMESPACE::flowgraph::MonoToMultiConverter> mChannelConverter;
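The pull()/process() contract above can be exercised outside the stream classes. Below is a minimal calling-pattern sketch, not part of the patch, assuming the client/AAudioFlowGraph.h header and a graph already configured for mono float input and output (as the new tests do); convertBlock() is a hypothetical helper name and the 8-frame chunk size is illustrative.

    #include <algorithm>
    #include <cstdint>

    #include "client/AAudioFlowGraph.h"

    // Hypothetical helper: convert mono float frames through an already-configured
    // AAudioFlowGraph. Returns the number of output frames produced.
    static int32_t convertBlock(AAudioFlowGraph& graph,
                                const float* input, int32_t inputFrames,
                                float* output, int32_t outputCapacity) {
        // Drain frames left over from a previous call before pushing new input.
        int32_t produced = graph.pull(output, outputCapacity);
        int32_t consumed = 0;
        while (consumed < inputFrames && produced < outputCapacity) {
            // Feed small chunks; process() returns how many output frames fit.
            const int32_t chunk = std::min<int32_t>(8, inputFrames - consumed);
            produced += graph.process(input + consumed, chunk,
                                      output + produced, outputCapacity - produced);
            consumed += chunk;
        }
        return produced;
    }

If the output buffer fills up before all input is consumed, the leftover frames stay inside the flowgraph and come out on the next pull(), which is exactly the residual-data behavior the header comments describe.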
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 84c715f..9b1ad72 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -63,6 +63,8 @@
 
 #define LOG_TIMESTAMPS            0
 
+#define ENABLE_SAMPLE_RATE_CONVERTER 1
+
 AudioStreamInternal::AudioStreamInternal(AAudioServiceInterface  &serviceInterface, bool inService)
         : AudioStream()
         , mClockModel()
@@ -132,12 +134,6 @@
 
     request.getConfiguration().setBufferCapacity(builder.getBufferCapacity());
 
-    request.getConfiguration().setHardwareSamplesPerFrame(builder.getHardwareSamplesPerFrame());
-    request.getConfiguration().setHardwareSampleRate(builder.getHardwareSampleRate());
-    request.getConfiguration().setHardwareFormat(builder.getHardwareFormat());
-
-    mDeviceChannelCount = getSamplesPerFrame(); // Assume it will be the same. Update if not.
-
     mServiceStreamHandleInfo = mServiceInterface.openStream(request, configurationOutput);
     if (getServiceHandle() < 0
             && (request.getConfiguration().getSamplesPerFrame() == 1
@@ -181,9 +177,6 @@
         setChannelMask(configurationOutput.getChannelMask());
     }
 
-    mDeviceChannelCount = configurationOutput.getSamplesPerFrame();
-
-    setSampleRate(configurationOutput.getSampleRate());
     setDeviceId(configurationOutput.getDeviceId());
     setSessionId(configurationOutput.getSessionId());
     setSharingMode(configurationOutput.getSharingMode());
@@ -194,8 +187,21 @@
     setIsContentSpatialized(configurationOutput.isContentSpatialized());
     setInputPreset(configurationOutput.getInputPreset());
 
+    setDeviceSampleRate(configurationOutput.getSampleRate());
+
+    if (getSampleRate() == AAUDIO_UNSPECIFIED) {
+        setSampleRate(configurationOutput.getSampleRate());
+    }
+
+#if !ENABLE_SAMPLE_RATE_CONVERTER
+    if (getSampleRate() != getDeviceSampleRate()) {
+        goto error;
+    }
+#endif
+
     // Save device format so we can do format conversion and volume scaling together.
     setDeviceFormat(configurationOutput.getFormat());
+    setDeviceSamplesPerFrame(configurationOutput.getSamplesPerFrame());
 
     setHardwareSamplesPerFrame(configurationOutput.getHardwareSamplesPerFrame());
     setHardwareSampleRate(configurationOutput.getHardwareSampleRate());
@@ -233,39 +239,46 @@
 }
 
 aaudio_result_t AudioStreamInternal::configureDataInformation(int32_t callbackFrames) {
-    int32_t framesPerHardwareBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
+    int32_t deviceFramesPerBurst = mEndpointDescriptor.dataQueueDescriptor.framesPerBurst;
 
     // Scale up the burst size to meet the minimum equivalent in microseconds.
     // This is to avoid waking the CPU too often when the HW burst is very small
-    // or at high sample rates.
-    int32_t framesPerBurst = framesPerHardwareBurst;
+    // or at high sample rates. The number of frames delivered to the app callback
+    // will be 0 < N <= framesPerBurst, so round up the division.
+    int32_t framesPerBurst = (static_cast<int64_t>(deviceFramesPerBurst) * getSampleRate() +
+             getDeviceSampleRate() - 1) / getDeviceSampleRate();
     int32_t burstMicros = 0;
     const int32_t burstMinMicros = android::AudioSystem::getAAudioHardwareBurstMinUsec();
     do {
         if (burstMicros > 0) {  // skip first loop
+            deviceFramesPerBurst *= 2;
             framesPerBurst *= 2;
         }
         burstMicros = framesPerBurst * static_cast<int64_t>(1000000) / getSampleRate();
     } while (burstMicros < burstMinMicros);
     ALOGD("%s() original HW burst = %d, minMicros = %d => SW burst = %d\n",
-          __func__, framesPerHardwareBurst, burstMinMicros, framesPerBurst);
+          __func__, deviceFramesPerBurst, burstMinMicros, framesPerBurst);
 
     // Validate final burst size.
     if (framesPerBurst < MIN_FRAMES_PER_BURST || framesPerBurst > MAX_FRAMES_PER_BURST) {
         ALOGE("%s - framesPerBurst out of range = %d", __func__, framesPerBurst);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
+    setDeviceFramesPerBurst(deviceFramesPerBurst);
     setFramesPerBurst(framesPerBurst); // only save good value
 
-    mBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+    mDeviceBufferCapacityInFrames = mEndpointDescriptor.dataQueueDescriptor.capacityInFrames;
+
+    mBufferCapacityInFrames = static_cast<int64_t>(mDeviceBufferCapacityInFrames)
+            * getSampleRate() / getDeviceSampleRate();
     if (mBufferCapacityInFrames < getFramesPerBurst()
             || mBufferCapacityInFrames > MAX_BUFFER_CAPACITY_IN_FRAMES) {
         ALOGE("%s - bufferCapacity out of range = %d", __func__, mBufferCapacityInFrames);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
 
-    mClockModel.setSampleRate(getSampleRate());
-    mClockModel.setFramesPerBurst(framesPerHardwareBurst);
+    mClockModel.setSampleRate(getDeviceSampleRate());
+    mClockModel.setFramesPerBurst(deviceFramesPerBurst);
 
     if (isDataCallbackSet()) {
         mCallbackFrames = callbackFrames;
@@ -315,7 +328,8 @@
         mTimeOffsetNanos = offsetMicros * AAUDIO_NANOS_PER_MICROSECOND;
     }
 
-    setBufferSize(mBufferCapacityInFrames / 2); // Default buffer size to match Q
+    // Default buffer size to match Q
+    setBufferSize(mBufferCapacityInFrames / 2);
     return AAUDIO_OK;
 }
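In the burst computation in configureDataInformation() above, the device burst is first converted to client-rate frames with a ceiling division and then doubled (together with the device burst) until it covers the minimum burst duration. The standalone sketch below uses assumed numbers, a 96-frame device burst at 48000 Hz, a 44100 Hz client rate and a 2000 us minimum; the real minimum comes from AudioSystem::getAAudioHardwareBurstMinUsec().

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed inputs; in the patch they come from the endpoint descriptor,
        // getSampleRate()/getDeviceSampleRate() and AudioSystem.
        int32_t deviceFramesPerBurst = 96;        // HW burst at the device rate
        const int32_t deviceSampleRate = 48000;
        const int32_t sampleRate = 44100;         // client (app) rate
        const int32_t burstMinMicros = 2000;

        // Ceiling division: the SW burst must cover a whole HW burst of app frames.
        int32_t framesPerBurst = (static_cast<int64_t>(deviceFramesPerBurst) * sampleRate
                + deviceSampleRate - 1) / deviceSampleRate;   // ceil(96 * 44100 / 48000) = 89

        int32_t burstMicros = 0;
        do {
            if (burstMicros > 0) {  // skip the first pass, as in configureDataInformation()
                deviceFramesPerBurst *= 2;
                framesPerBurst *= 2;
            }
            burstMicros = framesPerBurst * static_cast<int64_t>(1000000) / sampleRate;
        } while (burstMicros < burstMinMicros);

        // 89 frames at 44100 Hz is about 2018 us, so no doubling happens for these numbers.
        printf("deviceBurst=%d appBurst=%d burstMicros=%d\n",
               deviceFramesPerBurst, framesPerBurst, burstMicros);
        return 0;
    }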
 
@@ -374,9 +388,9 @@
     // Cache the buffer size which may be from client.
     const int32_t previousBufferSize = mBufferSizeInFrames;
     // Copy all available data from current data queue.
-    uint8_t buffer[getBufferCapacity() * getBytesPerFrame()];
-    android::fifo_frames_t fullFramesAvailable =
-            mAudioEndpoint->read(buffer, getBufferCapacity());
+    uint8_t buffer[getDeviceBufferCapacity() * getBytesPerFrame()];
+    android::fifo_frames_t fullFramesAvailable = mAudioEndpoint->read(buffer,
+            getDeviceBufferCapacity());
     mEndPointParcelable.closeDataFileDescriptor();
     aaudio_result_t result = mServiceInterface.exitStandby(
             mServiceStreamHandleInfo, endpointParcelable);
@@ -408,7 +422,7 @@
         goto exit;
     }
     // Write data from previous data buffer to new endpoint.
-    if (android::fifo_frames_t framesWritten =
+    if (const android::fifo_frames_t framesWritten =
                 mAudioEndpoint->write(buffer, fullFramesAvailable);
             framesWritten != fullFramesAvailable) {
         ALOGW("Some data lost after exiting standby, frames written: %d, "
@@ -448,7 +462,7 @@
         ALOGD("requestStart() but DISCONNECTED");
         return AAUDIO_ERROR_DISCONNECTED;
     }
-    aaudio_stream_state_t originalState = getState();
+    const aaudio_stream_state_t originalState = getState();
     setState(AAUDIO_STREAM_STATE_STARTING);
 
     // Clear any stale timestamps from the previous run.
@@ -605,7 +619,11 @@
     // Generated in server and passed to client. Return latest.
     if (mAtomicInternalTimestamp.isValid()) {
         Timestamp timestamp = mAtomicInternalTimestamp.read();
-        int64_t position = timestamp.getPosition() + mFramesOffsetFromService;
+        // This should not overflow as timestamp.getPosition() should be a position in a buffer and
+        // not the actual timestamp. timestamp.getNanoseconds() below uses the actual timestamp.
+        // At 48000 Hz we can run for over 100 years before overflowing the int64_t.
+        int64_t position = (timestamp.getPosition() + mFramesOffsetFromService) * getSampleRate() /
+                getDeviceSampleRate();
         if (position >= 0) {
             *framePosition = position;
             *timeNanoseconds = timestamp.getNanoseconds();
@@ -889,7 +907,8 @@
         adjustedFrames = maximumSize;
     } else {
         // Round to the next highest burst size.
-        int32_t numBursts = (adjustedFrames + getFramesPerBurst() - 1) / getFramesPerBurst();
+        int32_t numBursts = (static_cast<int64_t>(adjustedFrames) + getFramesPerBurst() - 1) /
+                getFramesPerBurst();
         adjustedFrames = numBursts * getFramesPerBurst();
         // Clip just in case maximumSize is not a multiple of getFramesPerBurst().
         adjustedFrames = std::min(maximumSize, adjustedFrames);
@@ -897,23 +916,32 @@
 
     if (mAudioEndpoint) {
         // Clip against the actual size from the endpoint.
-        int32_t actualFrames = 0;
+        int32_t actualFramesDevice = 0;
+        int32_t maximumFramesDevice = (static_cast<int64_t>(maximumSize) * getDeviceSampleRate()
+                + getSampleRate() - 1) / getSampleRate();
         // Set to maximum size so we can write extra data when ready in order to reduce glitches.
         // The amount we keep in the buffer is controlled by mBufferSizeInFrames.
-        mAudioEndpoint->setBufferSizeInFrames(maximumSize, &actualFrames);
+        mAudioEndpoint->setBufferSizeInFrames(maximumFramesDevice, &actualFramesDevice);
+        int32_t actualFrames = (static_cast<int64_t>(actualFramesDevice) * getSampleRate() +
+                 getDeviceSampleRate() - 1) / getDeviceSampleRate();
         // actualFrames should be <= actual maximum size of endpoint
         adjustedFrames = std::min(actualFrames, adjustedFrames);
     }
 
-    if (adjustedFrames != mBufferSizeInFrames) {
+    const int32_t bufferSizeInFrames = adjustedFrames;
+    const int32_t deviceBufferSizeInFrames = static_cast<int64_t>(bufferSizeInFrames) *
+            getDeviceSampleRate() / getSampleRate();
+
+    if (deviceBufferSizeInFrames != mDeviceBufferSizeInFrames) {
         android::mediametrics::LogItem(mMetricsId)
                 .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE)
-                .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, adjustedFrames)
+                .set(AMEDIAMETRICS_PROP_BUFFERSIZEFRAMES, deviceBufferSizeInFrames)
                 .set(AMEDIAMETRICS_PROP_UNDERRUN, (int32_t) getXRunCount())
                 .record();
     }
 
-    mBufferSizeInFrames = adjustedFrames;
+    mBufferSizeInFrames = bufferSizeInFrames;
+    mDeviceBufferSizeInFrames = deviceBufferSizeInFrames;
     ALOGV("%s(%d) returns %d", __func__, requestedFrames, adjustedFrames);
     return (aaudio_result_t) adjustedFrames;
 }
@@ -922,10 +950,18 @@
     return mBufferSizeInFrames;
 }
 
+int32_t AudioStreamInternal::getDeviceBufferSize() const {
+    return mDeviceBufferSizeInFrames;
+}
+
 int32_t AudioStreamInternal::getBufferCapacity() const {
     return mBufferCapacityInFrames;
 }
 
+int32_t AudioStreamInternal::getDeviceBufferCapacity() const {
+    return mDeviceBufferCapacityInFrames;
+}
+
 bool AudioStreamInternal::isClockModelInControl() const {
     return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
 }
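Several of the changes in this file convert frame counts between the client rate and the device rate, rounding up where an undersized result would be a problem (the software burst, the device-side clip limit) and rounding down for capacities, positions and counters. The two conversions are summarized in the sketch below; the helper names and the example rates are illustrative, not from the patch.

    #include <cstdint>
    #include <cstdio>

    // Round up: used where an undersized result would be a problem, e.g. deriving
    // the SW burst from the device burst or maximumFramesDevice from maximumSize.
    static int32_t framesCeil(int64_t frames, int32_t fromRate, int32_t toRate) {
        return static_cast<int32_t>((frames * toRate + fromRate - 1) / fromRate);
    }

    // Round down: used for capacities, positions and counters, e.g. the timestamp
    // position and the AAudioStream_getFramesWritten()/getFramesRead() scaling.
    static int64_t framesFloor(int64_t frames, int32_t fromRate, int32_t toRate) {
        return frames * toRate / fromRate;
    }

    int main() {
        // 1920 device frames at 48000 Hz map to exactly 1764 client frames at 44100 Hz;
        // 100 device frames round down to 91 and up to 92.
        printf("%d %lld %d\n",
               framesCeil(1920, 48000, 44100),
               static_cast<long long>(framesFloor(1920, 48000, 44100)),
               framesCeil(100, 48000, 44100));
        return 0;
    }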
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index 9c06121..a5981b1 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -22,8 +22,9 @@
 
 #include "binding/AudioEndpointParcelable.h"
 #include "binding/AAudioServiceInterface.h"
-#include "client/IsochronousClockModel.h"
+#include "client/AAudioFlowGraph.h"
 #include "client/AudioEndpoint.h"
+#include "client/IsochronousClockModel.h"
 #include "core/AudioStream.h"
 #include "utility/AudioClock.h"
 
@@ -56,8 +57,12 @@
 
     int32_t getBufferSize() const override;
 
+    int32_t getDeviceBufferSize() const;
+
     int32_t getBufferCapacity() const override;
 
+    int32_t getDeviceBufferCapacity() const override;
+
     int32_t getXRunCount() const override {
         return mXRunCount;
     }
@@ -133,8 +138,6 @@
     // Calculate timeout for an operation involving framesPerOperation.
     int64_t calculateReasonableTimeout(int32_t framesPerOperation);
 
-    int32_t getDeviceChannelCount() const { return mDeviceChannelCount; }
-
     /**
      * @return true if running in audio service, versus in app process
      */
@@ -177,6 +180,8 @@
     int64_t                  mLastFramesWritten = 0;
     int64_t                  mLastFramesRead = 0;
 
+    AAudioFlowGraph          mFlowGraph;
+
 private:
     /*
      * Asynchronous write with data conversion.
@@ -206,13 +211,10 @@
 
     int64_t                  mServiceLatencyNanos = 0;
 
-    // Sometimes the hardware is operating with a different channel count from the app.
-    // Then we require conversion in AAudio.
-    int32_t                  mDeviceChannelCount = 0;
-
     int32_t                  mBufferSizeInFrames = 0; // local threshold to control latency
+    int32_t                  mDeviceBufferSizeInFrames = 0;
     int32_t                  mBufferCapacityInFrames = 0;
-
+    int32_t                  mDeviceBufferCapacityInFrames = 0;
 
 };
 
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
index f5cc2be..7d7b4ef 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.cpp
@@ -47,6 +47,27 @@
 
 }
 
+aaudio_result_t AudioStreamInternalCapture::open(const AudioStreamBuilder &builder) {
+    aaudio_result_t result = AudioStreamInternal::open(builder);
+    if (result == AAUDIO_OK) {
+        result = mFlowGraph.configure(getDeviceFormat(),
+                             getDeviceSamplesPerFrame(),
+                             getDeviceSampleRate(),
+                             getFormat(),
+                             getSamplesPerFrame(),
+                             getSampleRate(),
+                             getRequireMonoBlend(),
+                             false /* useVolumeRamps */,
+                             getAudioBalance(),
+                             aaudio::resampler::MultiChannelResampler::Quality::Medium);
+
+        if (result != AAUDIO_OK) {
+            safeReleaseClose();
+        }
+    }
+    return result;
+}
+
 void AudioStreamInternalCapture::advanceClientToMatchServerPosition(int32_t serverMargin) {
     int64_t readCounter = mAudioEndpoint->getDataReadCounter();
     int64_t writeCounter = mAudioEndpoint->getDataWriteCounter() + serverMargin;
@@ -149,7 +170,8 @@
                 // Calculate frame position based off of the readCounter because
                 // the writeCounter might have just advanced in the background,
                 // causing us to sleep until a later burst.
-                int64_t nextPosition = mAudioEndpoint->getDataReadCounter() + getFramesPerBurst();
+                const int64_t nextPosition = mAudioEndpoint->getDataReadCounter() +
+                        getDeviceFramesPerBurst();
                 wakeTime = mClockModel.convertPositionToLatestTime(nextPosition);
             }
                 break;
@@ -166,42 +188,73 @@
 
 aaudio_result_t AudioStreamInternalCapture::readNowWithConversion(void *buffer,
                                                                 int32_t numFrames) {
-    // ALOGD("readNowWithConversion(%p, %d)",
-    //              buffer, numFrames);
     WrappingBuffer wrappingBuffer;
-    uint8_t *destination = (uint8_t *) buffer;
-    int32_t framesLeft = numFrames;
+    uint8_t *byteBuffer = (uint8_t *) buffer;
+    int32_t framesLeftInByteBuffer = numFrames;
+
+    if (framesLeftInByteBuffer > 0) {
+        // Pull data from the flowgraph in case there is residual data.
+        const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.pull(
+                (void *)byteBuffer,
+                framesLeftInByteBuffer);
+
+        const int32_t numBytesActuallyWrittenToByteBuffer =
+                framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+        byteBuffer += numBytesActuallyWrittenToByteBuffer;
+        framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+    }
 
     mAudioEndpoint->getFullFramesAvailable(&wrappingBuffer);
 
-    // Read data in one or two parts.
-    for (int partIndex = 0; framesLeft > 0 && partIndex < WrappingBuffer::SIZE; partIndex++) {
-        int32_t framesToProcess = framesLeft;
-        const int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
-        if (framesAvailable <= 0) break;
+    // Write data in one or two parts.
+    int partIndex = 0;
+    int framesReadFromAudioEndpoint = 0;
+    while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+        const int32_t totalFramesInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+        int32_t framesAvailableInWrappingBuffer = totalFramesInWrappingBuffer;
+        uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
 
-        if (framesToProcess > framesAvailable) {
-            framesToProcess = framesAvailable;
+        // Put data from the wrapping buffer into the flowgraph 8 frames at a time.
+        // Continuously pull as much data as possible from the flowgraph into the byte buffer.
+        // The return value of mFlowGraph.process is the number of frames actually pulled.
+        while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+            const int32_t framesToReadFromWrappingBuffer = std::min(flowgraph::kDefaultBufferSize,
+                    framesAvailableInWrappingBuffer);
+
+            const int32_t numBytesToReadFromWrappingBuffer = getBytesPerDeviceFrame() *
+                    framesToReadFromWrappingBuffer;
+
+            // If framesActuallyWrittenToByteBuffer < framesLeftInByteBuffer, the flowgraph has
+            // been fully drained. If the byteBuffer runs out of space, the remaining data will
+            // be pulled by the next call to readNowWithConversion().
+            const int32_t framesActuallyWrittenToByteBuffer = mFlowGraph.process(
+                    (void *)currentWrappingBuffer,
+                    framesToReadFromWrappingBuffer,
+                    (void *)byteBuffer,
+                    framesLeftInByteBuffer);
+
+            const int32_t numBytesActuallyWrittenToByteBuffer =
+                    framesActuallyWrittenToByteBuffer * getBytesPerFrame();
+            byteBuffer += numBytesActuallyWrittenToByteBuffer;
+            framesLeftInByteBuffer -= framesActuallyWrittenToByteBuffer;
+            currentWrappingBuffer += numBytesToReadFromWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesToReadFromWrappingBuffer;
+
+            //ALOGD("%s() numBytesActuallyWrittenToByteBuffer %d, framesLeftInByteBuffer %d"
+            //      "framesAvailableInWrappingBuffer %d, framesReadFromAudioEndpoint %d"
+            //      , __func__, numBytesActuallyWrittenToByteBuffer, framesLeftInByteBuffer,
+            //      framesAvailableInWrappingBuffer, framesReadFromAudioEndpoint);
         }
-
-        const int32_t numBytes = getBytesPerFrame() * framesToProcess;
-        const int32_t numSamples = framesToProcess * getSamplesPerFrame();
-
-        const audio_format_t sourceFormat = getDeviceFormat();
-        const audio_format_t destinationFormat = getFormat();
-
-        memcpy_by_audio_format(destination, destinationFormat,
-                wrappingBuffer.data[partIndex], sourceFormat, numSamples);
-
-        destination += numBytes;
-        framesLeft -= framesToProcess;
+        framesReadFromAudioEndpoint += totalFramesInWrappingBuffer -
+                framesAvailableInWrappingBuffer;
+        partIndex++;
     }
 
-    int32_t framesProcessed = numFrames - framesLeft;
-    mAudioEndpoint->advanceReadIndex(framesProcessed);
+    // Advance the endpoint's read index by the frames consumed from the wrapping buffer.
+    mAudioEndpoint->advanceReadIndex(framesReadFromAudioEndpoint);
 
-    //ALOGD("readNowWithConversion() returns %d", framesProcessed);
-    return framesProcessed;
+    // The caller should see the number of frames actually delivered to the app buffer.
+    return numFrames - framesLeftInByteBuffer;
 }
 
 int64_t AudioStreamInternalCapture::getFramesWritten() {
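readNowWithConversion() above walks the (at most two) contiguous regions of the endpoint FIFO and keeps two counters: frames consumed at the device rate, which drive advanceReadIndex(), and frames produced at the client rate, which are returned to the app. The sketch below shows that bookkeeping in isolation, assuming mono float data; WrappedRegions, drainWrappedFifo() and the process callback are hypothetical stand-ins for WrappingBuffer and AAudioFlowGraph::process().

    #include <algorithm>
    #include <cstdint>
    #include <functional>

    // Hypothetical view of a circular FIFO: at most two contiguous regions.
    struct WrappedRegions {
        float*  data[2];
        int32_t numFrames[2];
    };

    // Walk both regions, feeding the converter in small chunks and keeping two
    // separate counters: frames consumed from the FIFO (device rate) and frames
    // produced for the app (client rate). Only the former may advance the read index.
    // 'process' returns how many output frames fit into the remaining app space.
    int32_t drainWrappedFifo(const WrappedRegions& regions,
                             float* appBuffer, int32_t appFramesWanted,
                             int32_t* framesConsumedFromFifo,
                             const std::function<int32_t(const float*, int32_t,
                                                         float*, int32_t)>& process) {
        constexpr int32_t kChunkFrames = 8;   // matches flowgraph::kDefaultBufferSize
        int32_t produced = 0;
        int32_t consumed = 0;
        for (int part = 0; part < 2 && produced < appFramesWanted; part++) {
            const float* src = regions.data[part];
            int32_t left = regions.numFrames[part];
            while (left > 0 && produced < appFramesWanted) {
                const int32_t chunk = std::min(kChunkFrames, left);
                produced += process(src, chunk, appBuffer + produced, appFramesWanted - produced);
                src += chunk;   // mono float source in this sketch
                left -= chunk;
                consumed += chunk;
            }
        }
        *framesConsumedFromFifo = consumed;   // use this for advanceReadIndex()
        return produced;                      // this is what the app sees
    }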
diff --git a/media/libaaudio/src/client/AudioStreamInternalCapture.h b/media/libaaudio/src/client/AudioStreamInternalCapture.h
index 87017de..10e247d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalCapture.h
+++ b/media/libaaudio/src/client/AudioStreamInternalCapture.h
@@ -32,6 +32,8 @@
                                         bool inService = false);
     virtual ~AudioStreamInternalCapture() = default;
 
+    aaudio_result_t open(const AudioStreamBuilder &builder) override;
+
     aaudio_result_t read(void *buffer,
                          int32_t numFrames,
                          int64_t timeoutNanoseconds) override;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 89dd8ff..ac927ae 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -48,14 +48,18 @@
 
 aaudio_result_t AudioStreamInternalPlay::open(const AudioStreamBuilder &builder) {
     aaudio_result_t result = AudioStreamInternal::open(builder);
+    const bool useVolumeRamps = (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE);
     if (result == AAUDIO_OK) {
         result = mFlowGraph.configure(getFormat(),
                              getSamplesPerFrame(),
+                             getSampleRate(),
                              getDeviceFormat(),
-                             getDeviceChannelCount(),
+                             getDeviceSamplesPerFrame(),
+                             getDeviceSampleRate(),
                              getRequireMonoBlend(),
+                             useVolumeRamps,
                              getAudioBalance(),
-                             (getSharingMode() == AAUDIO_SHARING_MODE_EXCLUSIVE));
+                             aaudio::resampler::MultiChannelResampler::Quality::Medium);
 
         if (result != AAUDIO_OK) {
             safeReleaseClose();
@@ -186,7 +190,7 @@
     // Sleep if there is too much data in the buffer.
     // Calculate an ideal time to wake up.
     if (wakeTimePtr != nullptr
-            && (mAudioEndpoint->getFullFramesAvailable() >= getBufferSize())) {
+            && (mAudioEndpoint->getFullFramesAvailable() >= getDeviceBufferSize())) {
         // By default wake up a few milliseconds from now.  // TODO review
         int64_t wakeTime = currentNanoTime + (1 * AAUDIO_NANOS_PER_MILLISECOND);
         aaudio_stream_state_t state = getState();
@@ -206,12 +210,12 @@
                 // If the appBufferSize is smaller than the endpointBufferSize then
                 // we will have room to write data beyond the appBufferSize.
                 // That is a technique used to reduce glitches without adding latency.
-                const int32_t appBufferSize = getBufferSize();
+                const int64_t appBufferSize = getDeviceBufferSize();
                 // The endpoint buffer size is set to the maximum that can be written.
                 // If we use it then we must carve out some room to write data when we wake up.
-                const int32_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
-                        - getFramesPerBurst();
-                const int32_t bestBufferSize = std::min(appBufferSize, endBufferSize);
+                const int64_t endBufferSize = mAudioEndpoint->getBufferSizeInFrames()
+                        - getDeviceFramesPerBurst();
+                const int64_t bestBufferSize = std::min(appBufferSize, endBufferSize);
                 int64_t targetReadPosition = mAudioEndpoint->getDataWriteCounter() - bestBufferSize;
                 wakeTime = mClockModel.convertPositionToTime(targetReadPosition);
             }
@@ -232,37 +236,78 @@
                                                             int32_t numFrames) {
     WrappingBuffer wrappingBuffer;
     uint8_t *byteBuffer = (uint8_t *) buffer;
-    int32_t framesLeft = numFrames;
+    int32_t framesLeftInByteBuffer = numFrames;
 
     mAudioEndpoint->getEmptyFramesAvailable(&wrappingBuffer);
 
     // Write data in one or two parts.
     int partIndex = 0;
-    while (framesLeft > 0 && partIndex < WrappingBuffer::SIZE) {
-        int32_t framesToWrite = framesLeft;
-        int32_t framesAvailable = wrappingBuffer.numFrames[partIndex];
-        if (framesAvailable > 0) {
-            if (framesToWrite > framesAvailable) {
-                framesToWrite = framesAvailable;
-            }
+    int framesWrittenToAudioEndpoint = 0;
+    while (framesLeftInByteBuffer > 0 && partIndex < WrappingBuffer::SIZE) {
+        int32_t framesAvailableInWrappingBuffer = wrappingBuffer.numFrames[partIndex];
+        uint8_t *currentWrappingBuffer = (uint8_t *) wrappingBuffer.data[partIndex];
 
-            int32_t numBytes = getBytesPerFrame() * framesToWrite;
+        if (framesAvailableInWrappingBuffer > 0) {
+            // Pull data from the flowgraph in case there is residual data.
+            const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.pull(
+                (void*) currentWrappingBuffer,
+                framesAvailableInWrappingBuffer);
 
-            mFlowGraph.process((void *)byteBuffer,
-                               wrappingBuffer.data[partIndex],
-                               framesToWrite);
+            const int32_t numBytesActuallyWrittenToWrappingBuffer =
+                framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+            currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+            framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
+        }
 
-            byteBuffer += numBytes;
-            framesLeft -= framesToWrite;
-        } else {
-            break;
+        // Put data from byteBuffer into the flowgraph one buffer (8 frames) at a time.
+        // Continuously pull as much data as possible from the flowgraph into the wrapping buffer.
+        // The return value of mFlowGraph.process is the number of frames actually pulled.
+        while (framesAvailableInWrappingBuffer > 0 && framesLeftInByteBuffer > 0) {
+            const int32_t framesToWriteFromByteBuffer = std::min(flowgraph::kDefaultBufferSize,
+                    framesLeftInByteBuffer);
+
+            const int32_t numBytesToWriteFromByteBuffer = getBytesPerFrame() *
+                    framesToWriteFromByteBuffer;
+
+            //ALOGD("%s() framesLeftInByteBuffer %d, framesAvailableInWrappingBuffer %d"
+            //      "framesToWriteFromByteBuffer %d, numBytesToWriteFromByteBuffer %d"
+            //      , __func__, framesLeftInByteBuffer, framesAvailableInWrappingBuffer,
+            //      framesToWriteFromByteBuffer, numBytesToWriteFromByteBuffer);
+
+            const int32_t framesActuallyWrittenToWrappingBuffer = mFlowGraph.process(
+                    (void *)byteBuffer,
+                    framesToWriteFromByteBuffer,
+                    (void *)currentWrappingBuffer,
+                    framesAvailableInWrappingBuffer);
+
+            byteBuffer += numBytesToWriteFromByteBuffer;
+            framesLeftInByteBuffer -= framesToWriteFromByteBuffer;
+            const int32_t numBytesActuallyWrittenToWrappingBuffer =
+                    framesActuallyWrittenToWrappingBuffer * getBytesPerDeviceFrame();
+            currentWrappingBuffer += numBytesActuallyWrittenToWrappingBuffer;
+            framesAvailableInWrappingBuffer -= framesActuallyWrittenToWrappingBuffer;
+            framesWrittenToAudioEndpoint += framesActuallyWrittenToWrappingBuffer;
+
+            //ALOGD("%s() numBytesActuallyWrittenToWrappingBuffer %d, framesLeftInByteBuffer %d"
+            //      "framesActuallyWrittenToWrappingBuffer %d, numBytesToWriteFromByteBuffer %d"
+            //      "framesWrittenToAudioEndpoint %d"
+            //      , __func__, numBytesActuallyWrittenToWrappingBuffer, framesLeftInByteBuffer,
+            //      framesActuallyWrittenToWrappingBuffer, numBytesToWriteFromByteBuffer,
+            //      framesWrittenToAudioEndpoint);
         }
         partIndex++;
     }
-    int32_t framesWritten = numFrames - framesLeft;
-    mAudioEndpoint->advanceWriteIndex(framesWritten);
+    //ALOGD("%s() framesWrittenToAudioEndpoint %d, numFrames %d"
+    //              "framesLeftInByteBuffer %d"
+    //              , __func__, framesWrittenToAudioEndpoint, numFrames,
+    //              framesLeftInByteBuffer);
 
-    return framesWritten;
+    // Advance the endpoint's write index by the frames written to the wrapping buffer.
+    mAudioEndpoint->advanceWriteIndex(framesWrittenToAudioEndpoint);
+
+    // The caller should see the number of frames consumed from the app buffer.
+    return numFrames - framesLeftInByteBuffer;
 }
 
 int64_t AudioStreamInternalPlay::getFramesRead() {
@@ -284,7 +329,6 @@
     return mLastFramesWritten;
 }
 
-
 // Render audio in the application callback and then write the data to the stream.
 void *AudioStreamInternalPlay::callbackLoop() {
     ALOGD("%s() entering >>>>>>>>>>>>>>>", __func__);
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index e761807..b51b5d0 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -21,7 +21,6 @@
 #include <aaudio/AAudio.h>
 
 #include "binding/AAudioServiceInterface.h"
-#include "client/AAudioFlowGraph.h"
 #include "client/AudioStreamInternal.h"
 
 using android::sp;
@@ -89,13 +88,11 @@
      * Asynchronous write with data conversion.
      * @param buffer
      * @param numFrames
-     * @return fdrames written or negative error
+     * @return frames written or negative error
      */
     aaudio_result_t writeNowWithConversion(const void *buffer,
                                            int32_t numFrames);
 
-    AAudioFlowGraph          mFlowGraph;
-
 };
 
 } /* namespace aaudio */
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 8a13a6f..1e27a81 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -571,13 +571,15 @@
 AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getFramesWritten();
+    return audioStream->getFramesWritten() * audioStream->getSampleRate() /
+            audioStream->getDeviceSampleRate();
 }
 
 AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream)
 {
     AudioStream *audioStream = convertAAudioStreamToAudioStream(stream);
-    return audioStream->getFramesRead();
+    return audioStream->getFramesRead() * audioStream->getSampleRate() /
+            audioStream->getDeviceSampleRate();
 }
 
 AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream,
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 56ef1e6..e0fd325 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -134,7 +134,8 @@
             .set(AMEDIAMETRICS_PROP_ENCODINGHARDWARE,
                     android::toString(getHardwareFormat()).c_str())
             .set(AMEDIAMETRICS_PROP_CHANNELCOUNTHARDWARE, (int32_t)getHardwareSamplesPerFrame())
-            .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate());
+            .set(AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE, (int32_t)getHardwareSampleRate())
+            .set(AMEDIAMETRICS_PROP_SAMPLERATECLIENT, (int32_t)getSampleRate());
 
         if (getDirection() == AAUDIO_DIRECTION_OUTPUT) {
             item.set(AMEDIAMETRICS_PROP_PLAYERIID, mPlayerBase->getPlayerIId());
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 9b4b734..f2f5cac 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -204,10 +204,18 @@
         return mBufferCapacity;
     }
 
+    virtual int32_t getDeviceBufferCapacity() const {
+        return mDeviceBufferCapacity;
+    }
+
     virtual int32_t getFramesPerBurst() const {
         return mFramesPerBurst;
     }
 
+    virtual int32_t getDeviceFramesPerBurst() const {
+        return mDeviceFramesPerBurst;
+    }
+
     virtual int32_t getXRunCount() const {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
@@ -224,6 +232,10 @@
         return mSampleRate;
     }
 
+    aaudio_result_t getDeviceSampleRate() const {
+        return mDeviceSampleRate;
+    }
+
     aaudio_result_t getHardwareSampleRate() const {
         return mHardwareSampleRate;
     }
@@ -240,6 +252,10 @@
         return mSamplesPerFrame;
     }
 
+    aaudio_result_t getDeviceSamplesPerFrame() const {
+        return mDeviceSamplesPerFrame;
+    }
+
     aaudio_result_t getHardwareSamplesPerFrame() const {
         return mHardwareSamplesPerFrame;
     }
@@ -322,10 +338,10 @@
     }
 
     /**
-     * This is only valid after setChannelMask() and setDeviceFormat() have been called.
+     * This is only valid after setDeviceSamplesPerFrame() and setDeviceFormat() have been called.
      */
     int32_t getBytesPerDeviceFrame() const {
-        return getSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
+        return getDeviceSamplesPerFrame() * audio_bytes_per_sample(getDeviceFormat());
     }
 
     virtual int64_t getFramesWritten() = 0;
@@ -365,6 +381,11 @@
         mSamplesPerFrame = AAudioConvert_channelMaskToCount(channelMask);
     }
 
+    void setDeviceSamplesPerFrame(int32_t deviceSamplesPerFrame) {
+        mDeviceSamplesPerFrame = deviceSamplesPerFrame;
+    }
+
+
     /**
      * @return true if data callback has been specified
      */
@@ -542,6 +563,11 @@
     }
 
     // This should not be called after the open() call.
+    void setDeviceSampleRate(int32_t deviceSampleRate) {
+        mDeviceSampleRate = deviceSampleRate;
+    }
+
+    // This should not be called after the open() call.
     void setHardwareSampleRate(int32_t hardwareSampleRate) {
         mHardwareSampleRate = hardwareSampleRate;
     }
@@ -552,11 +578,21 @@
     }
 
     // This should not be called after the open() call.
+    void setDeviceFramesPerBurst(int32_t deviceFramesPerBurst) {
+        mDeviceFramesPerBurst = deviceFramesPerBurst;
+    }
+
+    // This should not be called after the open() call.
     void setBufferCapacity(int32_t bufferCapacity) {
         mBufferCapacity = bufferCapacity;
     }
 
     // This should not be called after the open() call.
+    void setDeviceBufferCapacity(int32_t deviceBufferCapacity) {
+        mDeviceBufferCapacity = deviceBufferCapacity;
+    }
+
+    // This should not be called after the open() call.
     void setSharingMode(aaudio_sharing_mode_t sharingMode) {
         mSharingMode = sharingMode;
     }
@@ -721,9 +757,11 @@
 
     // These do not change after open().
     int32_t                     mSamplesPerFrame = AAUDIO_UNSPECIFIED;
+    int32_t                     mDeviceSamplesPerFrame = AAUDIO_UNSPECIFIED;
     int32_t                     mHardwareSamplesPerFrame = AAUDIO_UNSPECIFIED;
     aaudio_channel_mask_t       mChannelMask = AAUDIO_UNSPECIFIED;
     int32_t                     mSampleRate = AAUDIO_UNSPECIFIED;
+    int32_t                     mDeviceSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mHardwareSampleRate = AAUDIO_UNSPECIFIED;
     int32_t                     mDeviceId = AAUDIO_UNSPECIFIED;
     aaudio_sharing_mode_t       mSharingMode = AAUDIO_SHARING_MODE_SHARED;
@@ -732,7 +770,9 @@
     audio_format_t              mHardwareFormat = AUDIO_FORMAT_DEFAULT;
     aaudio_performance_mode_t   mPerformanceMode = AAUDIO_PERFORMANCE_MODE_NONE;
     int32_t                     mFramesPerBurst = 0;
+    int32_t                     mDeviceFramesPerBurst = 0;
     int32_t                     mBufferCapacity = 0;
+    int32_t                     mDeviceBufferCapacity = 0;
 
     aaudio_usage_t              mUsage           = AAUDIO_UNSPECIFIED;
     aaudio_content_type_t       mContentType     = AAUDIO_UNSPECIFIED;
diff --git a/media/libaaudio/src/core/VersionExperiment.txt b/media/libaaudio/src/core/VersionExperiment.txt
deleted file mode 100644
index 071239b..0000000
--- a/media/libaaudio/src/core/VersionExperiment.txt
+++ /dev/null
@@ -1,55 +0,0 @@
-
-// TODO Experiment with versioning. This may be removed or changed dramatically.
-// Please ignore for now. Do not review.
-#define OBOE_VERSION_EXPERIMENT  0
-#if OBOE_VERSION_EXPERIMENT
-
-#define OBOE_EARLIEST_SUPPORTED_VERSION  1
-#define OBOE_CURRENT_VERSION  2
-
-typedef struct OboeInterface_s {
-    int32_t size; // do not use size_t because its size can vary
-    int32_t version;
-    int32_t reserved1;
-    void *  reserved2;
-    oboe_result_t (*createStreamBuilder)(OboeStreamBuilder *);
-} OboeInterface_t;
-
-OboeInterface_t s_oboe_template = {
-        .size = sizeof(OboeInterface_t),
-        .version = OBOE_CURRENT_VERSION,
-        .reserved1 = 0,
-        .reserved2 = NULL,
-        .createStreamBuilder = Oboe_createStreamBuilder
-};
-
-oboe_result_t Oboe_Unimplemented(OboeInterface_t *oboe) {
-    (void) oboe;
-    return OBOE_ERROR_UNIMPLEMENTED;
-}
-
-typedef oboe_result_t (*OboeFunction_t)(OboeInterface_t *oboe);
-
-int32_t Oboe_Initialize(OboeInterface_t *oboe, uint32_t flags) {
-    if (oboe->version < OBOE_EARLIEST_SUPPORTED_VERSION) {
-        return OBOE_ERROR_INCOMPATIBLE;
-    }
-    // Fill in callers vector table.
-    uint8_t *start = (uint8_t*)&oboe->reserved1;
-    uint8_t *end;
-    if (oboe->size <= s_oboe_template.size) {
-        end = ((uint8_t *)oboe) + oboe->size;
-    } else {
-        end = ((uint8_t *)oboe) + s_oboe_template.size;
-        // Assume the rest of the structure is vectors.
-        // Point them all to OboeInternal_Unimplemented()
-        // Point to first vector past end of the known structure.
-        OboeFunction_t *next = (OboeFunction_t*)end;
-        while ((((uint8_t *)next) - ((uint8_t *)oboe)) < oboe->size) {
-            *next++ = Oboe_Unimplemented;
-        }
-    }
-    memcpy(&oboe->reserved1, &s_oboe_template.reserved1, end - start);
-    return OBOE_OK;
-}
-#endif /* OBOE_VERSION_EXPERIMENT -------------------------- */
diff --git a/media/libaaudio/src/fifo/FifoControllerBase.cpp b/media/libaaudio/src/fifo/FifoControllerBase.cpp
index ad6d041..e79bf96 100644
--- a/media/libaaudio/src/fifo/FifoControllerBase.cpp
+++ b/media/libaaudio/src/fifo/FifoControllerBase.cpp
@@ -21,7 +21,8 @@
 #include <stdint.h>
 #include "FifoControllerBase.h"
 
-using namespace android;  // TODO just import names needed
+using android::FifoControllerBase;
+using android::fifo_frames_t;
 
 FifoControllerBase::FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold)
         : mCapacity(capacity)
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
index a3ce58c..611ddcd 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.cpp
@@ -135,10 +135,9 @@
     int coefficientIndex = 0;
     double phase = 0.0; // ranges from 0.0 to 1.0, fraction between samples
     // Stretch the sinc function for low pass filtering.
-    const float cutoffScaler = normalizedCutoff *
-            ((outputRate < inputRate)
-             ? ((float)outputRate / inputRate)
-             : ((float)inputRate / outputRate));
+    const float cutoffScaler = (outputRate < inputRate)
+             ? (normalizedCutoff * (float)outputRate / inputRate)
+             : 1.0f; // Do not filter when upsampling.
     const int numTapsHalf = getNumTaps() / 2; // numTaps must be even.
     const float numTapsHalfInverse = 1.0f / numTapsHalf;
     for (int i = 0; i < numRows; i++) {
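The revised cutoff computation only low-pass filters when downsampling: when upsampling, the input spectrum already fits below the output Nyquist frequency, so no anti-aliasing filter is needed and normalizedCutoff is ignored, as the header comment notes. A minimal sketch of the same decision, with hypothetical rates and a standalone helper name:

    #include <cstdio>

    // Mirror of the cutoff scaling used when generating the sinc filter taps.
    // normalizedCutoff is a fraction of the Nyquist frequency (default 0.70).
    static float calculateCutoffScaler(int inputRate, int outputRate, float normalizedCutoff) {
        return (outputRate < inputRate)
                ? (normalizedCutoff * (float)outputRate / inputRate)  // downsampling: cut below output Nyquist
                : 1.0f;                                               // upsampling: do not filter
    }

    int main() {
        // 48000 -> 44100 Hz (downsampling): cutoff scaled to 0.70 * 44100 / 48000 ~= 0.643.
        printf("down: %f\n", calculateCutoffScaler(48000, 44100, 0.70f));
        // 44100 -> 48000 Hz (upsampling): scaler left at 1.0, i.e. no extra filtering.
        printf("up:   %f\n", calculateCutoffScaler(44100, 48000, 0.70f));
        return 0;
    }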
diff --git a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
index 717f3fd..9e47335 100644
--- a/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
+++ b/media/libaaudio/src/flowgraph/resampler/MultiChannelResampler.h
@@ -111,6 +111,9 @@
          * Set lower to reduce aliasing.
          * Default is 0.70.
          *
+         * Note that this value is ignored when upsampling, which is when
+         * the outputRate is higher than the inputRate.
+         *
          * @param normalizedCutoff anti-aliasing filter cutoff
          * @return address of this builder for chaining calls
          */
@@ -227,6 +230,10 @@
 
     /**
      * Generate the filter coefficients in optimal order.
+     *
+     * Note that normalizedCutoff is ignored when upsampling, which is when
+     * the outputRate is higher than the inputRate.
+     *
      * @param inputRate sample rate of the input stream
      * @param outputRate  sample rate of the output stream
      * @param numRows number of rows in the array that contain a set of tap coefficients
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index e760dab..fe4bf2c 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -208,6 +208,12 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    // Use the same values for device values.
+    setDeviceSamplesPerFrame(getSamplesPerFrame());
+    setDeviceSampleRate(mAudioRecord->getSampleRate());
+    setDeviceBufferCapacity(getBufferCapacityFromDevice());
+    setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
     setHardwareSamplesPerFrame(mAudioRecord->getHalChannelCount());
     setHardwareSampleRate(mAudioRecord->getHalSampleRate());
     setHardwareFormat(mAudioRecord->getHalFormat());
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 67ee42e..59fdabc 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -203,6 +203,12 @@
     setBufferCapacity(getBufferCapacityFromDevice());
     setFramesPerBurst(getFramesPerBurstFromDevice());
 
+    // Use the same values for device values.
+    setDeviceSamplesPerFrame(getSamplesPerFrame());
+    setDeviceSampleRate(mAudioTrack->getSampleRate());
+    setDeviceBufferCapacity(getBufferCapacityFromDevice());
+    setDeviceFramesPerBurst(getFramesPerBurstFromDevice());
+
     setHardwareSamplesPerFrame(mAudioTrack->getHalChannelCount());
     setHardwareSampleRate(mAudioTrack->getHalSampleRate());
     setHardwareFormat(mAudioTrack->getHalFormat());
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index e8324a8..0cbf79d 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -383,11 +383,10 @@
                 return AUDIO_CHANNEL_OUT_7POINT1POINT2;
             case AAUDIO_CHANNEL_7POINT1POINT4:
                 return AUDIO_CHANNEL_OUT_7POINT1POINT4;
-            // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
-            // case AAUDIO_CHANNEL_9POINT1POINT4:
-            //     return AUDIO_CHANNEL_OUT_9POINT1POINT4;
-            // case AAUDIO_CHANNEL_9POINT1POINT6:
-            //     return AUDIO_CHANNEL_OUT_9POINT1POINT6;
+            case AAUDIO_CHANNEL_9POINT1POINT4:
+                return AUDIO_CHANNEL_OUT_9POINT1POINT4;
+            case AAUDIO_CHANNEL_9POINT1POINT6:
+                return AUDIO_CHANNEL_OUT_9POINT1POINT6;
             default:
                 ALOGE("%s() %#x unrecognized", __func__, channelMask);
                 return AUDIO_CHANNEL_INVALID;
@@ -465,11 +464,10 @@
                 return AAUDIO_CHANNEL_7POINT1POINT2;
             case AUDIO_CHANNEL_OUT_7POINT1POINT4:
                 return AAUDIO_CHANNEL_7POINT1POINT4;
-            // TODO: add 9point1point4 and 9point1point6 when they are added in audio-hal-enums.h
-            // case AUDIO_CHANNEL_OUT_9POINT1POINT4:
-            //     return AAUDIO_CHANNEL_9POINT1POINT4;
-            // case AUDIO_CHANNEL_OUT_9POINT1POINT6:
-            //     return AAUDIO_CHANNEL_9POINT1POINT6;
+            case AUDIO_CHANNEL_OUT_9POINT1POINT4:
+                return AAUDIO_CHANNEL_9POINT1POINT4;
+            case AUDIO_CHANNEL_OUT_9POINT1POINT6:
+                return AAUDIO_CHANNEL_9POINT1POINT6;
             default:
                 ALOGE("%s() %#x unrecognized", __func__, channelMask);
                 return AAUDIO_CHANNEL_INVALID;
diff --git a/media/libaaudio/src/utility/AudioClock.h b/media/libaaudio/src/utility/AudioClock.h
index d5d4ef4..37f5b39 100644
--- a/media/libaaudio/src/utility/AudioClock.h
+++ b/media/libaaudio/src/utility/AudioClock.h
@@ -33,7 +33,7 @@
 public:
     static int64_t getNanoseconds(clockid_t clockId = CLOCK_MONOTONIC) {
         struct timespec time;
-        int result = clock_gettime(clockId, &time);
+        const int result = clock_gettime(clockId, &time);
         if (result < 0) {
             return -errno;
         }
@@ -56,7 +56,7 @@
             time.tv_sec = nanoTime / AAUDIO_NANOS_PER_SECOND;
             // Calculate the fractional nanoseconds. Avoids expensive % operation.
             time.tv_nsec = nanoTime - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
-            int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
+            const int err = clock_nanosleep(clockId, TIMER_ABSTIME, &time, nullptr);
             switch (err) {
             case EINTR:
                 return 1;
@@ -86,7 +86,7 @@
             // Calculate the fractional nanoseconds. Avoids expensive % operation.
             time.tv_nsec = nanoseconds - (time.tv_sec * AAUDIO_NANOS_PER_SECOND);
             const int flags = 0; // documented as relative sleep
-            int err = clock_nanosleep(clockId, flags, &time, nullptr);
+            const int err = clock_nanosleep(clockId, flags, &time, nullptr);
             switch (err) {
             case EINTR:
                 return 1;
diff --git a/media/libaaudio/src/utility/MonotonicCounter.h b/media/libaaudio/src/utility/MonotonicCounter.h
index 51eb69b..b58634f 100644
--- a/media/libaaudio/src/utility/MonotonicCounter.h
+++ b/media/libaaudio/src/utility/MonotonicCounter.h
@@ -104,7 +104,7 @@
      */
     void roundUp64(int32_t period) {
         if (period > 0) {
-            int64_t numPeriods = (mCounter64 + period - 1) / period;
+            const int64_t numPeriods = (mCounter64 + period - 1) / period;
             mCounter64 = numPeriods * period;
         }
     }
diff --git a/media/libaaudio/tests/Android.bp b/media/libaaudio/tests/Android.bp
index 24041bc..0cfdfb2 100644
--- a/media/libaaudio/tests/Android.bp
+++ b/media/libaaudio/tests/Android.bp
@@ -145,6 +145,7 @@
     srcs: ["test_flowgraph.cpp"],
     shared_libs: [
         "libaaudio_internal",
+        "libaudioutils",
         "libbinder",
         "libcutils",
         "libutils",
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 6f75f5a..7eb8b0d 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -25,6 +25,8 @@
 
 #include <gtest/gtest.h>
 
+#include <aaudio/AAudio.h>
+#include "client/AAudioFlowGraph.h"
 #include "flowgraph/ClipToRange.h"
 #include "flowgraph/Limiter.h"
 #include "flowgraph/MonoBlend.h"
@@ -37,8 +39,18 @@
 #include "flowgraph/SinkI32.h"
 #include "flowgraph/SourceI16.h"
 #include "flowgraph/SourceI24.h"
+#include "flowgraph/resampler/IntegerRatio.h"
 
 using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
+using namespace RESAMPLER_OUTER_NAMESPACE::resampler;
+
+using TestFlowgraphResamplerParams = std::tuple<int32_t, int32_t, MultiChannelResampler::Quality>;
+
+enum {
+    PARAM_SOURCE_SAMPLE_RATE = 0,
+    PARAM_SINK_SAMPLE_RATE,
+    PARAM_RESAMPLER_QUALITY
+};
 
 constexpr int kBytesPerI24Packed = 3;
 
@@ -394,3 +406,240 @@
         EXPECT_NEAR(expected[i], output[i], tolerance);
     }
 }
+
+TEST(test_flowgraph, module_sinki16_multiple_reads) {
+    static constexpr int kNumSamples = 8;
+    std::array<int16_t, kNumSamples + 10> output; // larger than input
+
+    SourceFloat sourceFloat{1};
+    SinkI16 sinkI16{1};
+
+    sourceFloat.setData(kInputFloat.data(), kNumSamples);
+    sourceFloat.output.connect(&sinkI16.input);
+
+    output.fill(777);
+
+    // Read the first half of the data
+    int32_t numRead = sinkI16.read(output.data(), kNumSamples / 2);
+    ASSERT_EQ(kNumSamples / 2, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+    }
+
+    // Read the rest of the data
+    numRead = sinkI16.read(output.data(), output.size());
+    ASSERT_EQ(kNumSamples / 2, numRead);
+    for (int i = 0; i < numRead; i++) {
+        EXPECT_EQ(kExpectedI16.at(i + kNumSamples / 2), output.at(i)) << ", i = " << i;
+    }
+}
+
+void checkSampleRateConversionVariedSizes(int32_t sourceSampleRate,
+                    int32_t sinkSampleRate,
+                    MultiChannelResampler::Quality resamplerQuality) {
+    AAudioFlowGraph flowgraph;
+    aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+            1 /* sourceChannelCount */,
+            sourceSampleRate,
+            AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+            1 /* sinkChannelCount */,
+            sinkSampleRate,
+            false /* useMonoBlend */,
+            false /* useVolumeRamps */,
+            0.0f /* audioBalance */,
+            resamplerQuality);
+
+    IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+    ratio.reduce();
+
+    ASSERT_EQ(AAUDIO_OK, result);
+
+    const int inputSize = ratio.getNumerator();
+    const int outputSize = ratio.getDenominator();
+    float input[inputSize];
+    float output[outputSize];
+
+    for (int i = 0; i < inputSize; i++) {
+        input[i] = i * 1.0f / inputSize;
+    }
+
+    int inputUsed = 0;
+    int outputRead = 0;
+    int curInputSize = 1;
+
+    // Process the data with larger and larger input buffer sizes.
+    while (inputUsed < inputSize) {
+        outputRead += flowgraph.process((void *) (input + inputUsed),
+                curInputSize,
+                (void *) (output + outputRead),
+                outputSize - outputRead);
+        inputUsed += curInputSize;
+        curInputSize = std::min(curInputSize + 5, inputSize - inputUsed);
+    }
+
+    ASSERT_EQ(outputSize, outputRead);
+
+    for (int i = 1; i < outputSize; i++) {
+        // The first values of the flowgraph will be close to zero.
+        // Besides those, the values should be strictly increasing.
+        if (output[i - 1] > 0.01f) {
+            EXPECT_GT(output[i], output[i - 1]);
+        }
+    }
+}
+
+TEST(test_flowgraph, flowgraph_varied_sizes_all) {
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+    const MultiChannelResampler::Quality qualities[] =
+    {
+        MultiChannelResampler::Quality::Fastest,
+        MultiChannelResampler::Quality::Low,
+        MultiChannelResampler::Quality::Medium,
+        MultiChannelResampler::Quality::High,
+        MultiChannelResampler::Quality::Best
+    };
+    for (int srcRate : rates) {
+        for (int destRate : rates) {
+            for (auto quality : qualities) {
+                if (srcRate != destRate) {
+                    checkSampleRateConversionVariedSizes(srcRate, destRate, quality);
+                }
+            }
+        }
+    }
+}
+
+void checkSampleRateConversionPullLater(int32_t sourceSampleRate,
+                    int32_t sinkSampleRate,
+                    MultiChannelResampler::Quality resamplerQuality) {
+    AAudioFlowGraph flowgraph;
+    aaudio_result_t result = flowgraph.configure(AUDIO_FORMAT_PCM_FLOAT /* sourceFormat */,
+            1 /* sourceChannelCount */,
+            sourceSampleRate,
+            AUDIO_FORMAT_PCM_FLOAT /* sinkFormat */,
+            1 /* sinkChannelCount */,
+            sinkSampleRate,
+            false /* useMonoBlend */,
+            false /* useVolumeRamps */,
+            0.0f /* audioBalance */,
+            resamplerQuality);
+
+    IntegerRatio ratio(sourceSampleRate, sinkSampleRate);
+    ratio.reduce();
+
+    ASSERT_EQ(AAUDIO_OK, result);
+
+    const int inputSize = ratio.getNumerator();
+    const int outputSize = ratio.getDenominator();
+    float input[inputSize];
+    float output[outputSize];
+
+    for (int i = 0; i < inputSize; i++) {
+        input[i] = i * 1.0f / inputSize;
+    }
+
+    // Read half the data with process.
+    int outputRead = flowgraph.process((void *) input,
+            inputSize,
+            (void *) output,
+            outputSize / 2);
+
+    ASSERT_EQ(outputSize / 2, outputRead);
+
+    // Now read the other half of the data with pull.
+    outputRead += flowgraph.pull(
+            (void *) (output + outputRead),
+            outputSize - outputRead);
+
+    ASSERT_EQ(outputSize, outputRead);
+    for (int i = 1; i < outputSize; i++) {
+        // The first values produced by the flowgraph will be close to zero.
+        // Besides those, the values should be strictly increasing.
+        if (output[i - 1] > 0.01f) {
+            EXPECT_GT(output[i], output[i - 1]);
+        }
+    }
+}
+
+// TODO: b/289508408 - Remove non-parameterized tests if they get noisy.
+TEST(test_flowgraph, flowgraph_pull_later_all) {
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
+    const MultiChannelResampler::Quality qualities[] =
+    {
+        MultiChannelResampler::Quality::Fastest,
+        MultiChannelResampler::Quality::Low,
+        MultiChannelResampler::Quality::Medium,
+        MultiChannelResampler::Quality::High,
+        MultiChannelResampler::Quality::Best
+    };
+    for (int srcRate : rates) {
+        for (int destRate : rates) {
+            for (auto quality : qualities) {
+                if (srcRate != destRate) {
+                    checkSampleRateConversionPullLater(srcRate, destRate, quality);
+                }
+            }
+        }
+    }
+}
+
+class TestFlowgraphSampleRateConversion : public ::testing::Test,
+                        public ::testing::WithParamInterface<TestFlowgraphResamplerParams> {
+};
+
+const char* resamplerQualityToString(MultiChannelResampler::Quality quality) {
+    switch (quality) {
+        case MultiChannelResampler::Quality::Fastest: return "FASTEST";
+        case MultiChannelResampler::Quality::Low: return "LOW";
+        case MultiChannelResampler::Quality::Medium: return "MEDIUM";
+        case MultiChannelResampler::Quality::High: return "HIGH";
+        case MultiChannelResampler::Quality::Best: return "BEST";
+    }
+    return "UNKNOWN";
+}
+
+static std::string getTestName(
+        const ::testing::TestParamInfo<TestFlowgraphResamplerParams>& info) {
+    return std::string()
+            + std::to_string(std::get<PARAM_SOURCE_SAMPLE_RATE>(info.param))
+            + "__" + std::to_string(std::get<PARAM_SINK_SAMPLE_RATE>(info.param))
+            + "__" + resamplerQualityToString(std::get<PARAM_RESAMPLER_QUALITY>(info.param));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_pull_later) {
+    checkSampleRateConversionPullLater(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+TEST_P(TestFlowgraphSampleRateConversion, test_flowgraph_varied_sizes) {
+    checkSampleRateConversionVariedSizes(std::get<PARAM_SOURCE_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_SINK_SAMPLE_RATE>(GetParam()),
+            std::get<PARAM_RESAMPLER_QUALITY>(GetParam()));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+        test_flowgraph,
+        TestFlowgraphSampleRateConversion,
+        ::testing::Values(
+                TestFlowgraphResamplerParams({8000, 11025, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({8000, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({8000, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 24000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 48000,
+                        MultiChannelResampler::Quality::Fastest}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Low}),
+                TestFlowgraphResamplerParams({11025, 48000,
+                        MultiChannelResampler::Quality::Medium}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::High}),
+                TestFlowgraphResamplerParams({11025, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({11025, 88200, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({16000, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Low}),
+                TestFlowgraphResamplerParams({44100, 48000, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({48000, 11025, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({48000, 44100, MultiChannelResampler::Quality::Best}),
+                TestFlowgraphResamplerParams({44100, 11025, MultiChannelResampler::Quality::Best})),
+        &getTestName
+);
diff --git a/media/libaaudio/tests/test_resampler.cpp b/media/libaaudio/tests/test_resampler.cpp
index 1e4f59c..13e4a20 100644
--- a/media/libaaudio/tests/test_resampler.cpp
+++ b/media/libaaudio/tests/test_resampler.cpp
@@ -101,14 +101,20 @@
         }
     }
 
+    // Flush out remaining frames from the flowgraph
+    while (!mcResampler->isWriteNeeded()) {
+        mcResampler->readNextFrame(output);
+        output++;
+        numRead++;
+    }
+
     ASSERT_LE(numRead, kNumOutputSamples);
     // Some frames are lost priming the FIR filter.
-    const int kMaxAlgorithmicFrameLoss = 16;
+    const int kMaxAlgorithmicFrameLoss = 5;
     EXPECT_GT(numRead, kNumOutputSamples - kMaxAlgorithmicFrameLoss);
 
     int sinkZeroCrossingCount = countZeroCrossingsWithHysteresis(outputBuffer.get(), numRead);
-    // Some cycles may get chopped off at the end.
-    const int kMaxZeroCrossingDelta = 3;
+    const int kMaxZeroCrossingDelta = std::max(sinkRate / sourceRate / 2, 1);
     EXPECT_LE(abs(sourceZeroCrossingCount - sinkZeroCrossingCount), kMaxZeroCrossingDelta);
 
     // Detect glitches by looking for spikes in the second derivative.
@@ -136,8 +142,7 @@
 
 
 TEST(test_resampler, resampler_scan_all) {
-    // TODO Add 64000, 88200, 96000 when they work. Failing now.
-    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000};
+    const int rates[] = {8000, 11025, 22050, 32000, 44100, 48000, 64000, 88200, 96000};
     const MultiChannelResampler::Quality qualities[] =
     {
         MultiChannelResampler::Quality::Fastest,
@@ -193,10 +198,9 @@
     checkResampler(11025, 44100, MultiChannelResampler::Quality::Best);
 }
 
-// TODO This fails because the output is very low.
-//TEST(test_resampler, resampler_11025_88200_best) {
-//    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
-//}
+TEST(test_resampler, resampler_11025_88200_best) {
+    checkResampler(11025, 88200, MultiChannelResampler::Quality::Best);
+}
 
 TEST(test_resampler, resampler_16000_48000_best) {
     checkResampler(16000, 48000, MultiChannelResampler::Quality::Best);
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 7cec2e8..763de70 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -108,7 +108,7 @@
 }
 
 // establish binder interface to AudioFlinger service
-const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+const sp<IAudioFlinger> AudioSystem::getAudioFlingerImpl(bool canStartThreadPool = true) {
     sp<IAudioFlinger> af;
     sp<AudioFlingerClient> afc;
     bool reportNoError = false;
@@ -132,12 +132,10 @@
                 binder = gAudioFlingerBinder;
             } else {
                 sp<IServiceManager> sm = defaultServiceManager();
-                do {
-                    binder = sm->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
-                    if (binder != nullptr) break;
-                    ALOGW("AudioFlinger not published, waiting...");
-                    usleep(500000); // 0.5 s
-                } while (true);
+                binder = sm->waitForService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+                if (binder == nullptr) {
+                    return nullptr;
+                }
             }
             binder->linkToDeath(gAudioFlingerClient);
             const auto afs = interface_cast<media::IAudioFlingerService>(binder);
@@ -147,7 +145,9 @@
         afc = gAudioFlingerClient;
         af = gAudioFlinger;
         // Make sure callbacks can be received by gAudioFlingerClient
-        ProcessState::self()->startThreadPool();
+        if (canStartThreadPool) {
+            ProcessState::self()->startThreadPool();
+        }
     }
     const int64_t token = IPCThreadState::self()->clearCallingIdentity();
     af->registerClient(afc);
@@ -156,6 +156,14 @@
     return af;
 }
 
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger() {
+    return getAudioFlingerImpl();
+}
+
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger_for_fuzzer() {
+    return getAudioFlingerImpl(false);
+}
+
 const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() {
     // calling get_audio_flinger() will initialize gAudioFlingerClient if needed
     const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
@@ -870,14 +878,10 @@
         Mutex::Autolock _l(gLockAPS);
         if (gAudioPolicyService == 0) {
             sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder;
-            do {
-                binder = sm->getService(String16("media.audio_policy"));
-                if (binder != 0)
-                    break;
-                ALOGW("AudioPolicyService not published, waiting...");
-                usleep(500000); // 0.5 s
-            } while (true);
+            sp<IBinder> binder = sm->waitForService(String16("media.audio_policy"));
+            if (binder == nullptr) {
+                return nullptr;
+            }
             if (gAudioPolicyServiceClient == NULL) {
                 gAudioPolicyServiceClient = new AudioPolicyServiceClient();
             }
@@ -2093,8 +2097,7 @@
         return BAD_VALUE;
     }
 
-    const sp<IAudioPolicyService>
-            & aps = AudioSystem::get_audio_policy_service();
+    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
     if (aps == 0) return PERMISSION_DENIED;
 
     std::vector<AudioFormatDescription> formatsAidl;
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 359c140..4269aa2 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -817,7 +817,7 @@
     (void) updateAndGetPosition_l();
 
     // save start timestamp
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         if (getTimestamp_l(mStartTs) != OK) {
             mStartTs.mPosition = 0;
         }
@@ -838,7 +838,7 @@
         mTimestampStaleTimeReported = false;
         mPreviousLocation = ExtendedTimestamp::LOCATION_INVALID;
 
-        if (!isOffloadedOrDirect_l()
+        if (!isAfTrackOffloadedOrDirect_l()
                 && mStartEts.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] > 0) {
             // Server side has consumed something, but is it finished consuming?
             // It is possible since flush and stop are asynchronous that the server
@@ -1917,6 +1917,7 @@
     mAfChannelCount = audio_channel_count_from_out_mask(output.afChannelMask);
     mAfFormat = output.afFormat;
     mAfLatency = output.afLatencyMs;
+    mAfTrackFlags = output.afTrackFlags;
 
     mLatency = mAfLatency + (1000LL * mFrameCount) / mSampleRate;
 
@@ -3182,7 +3183,7 @@
     // To avoid a race, read the presented frames first.  This ensures that presented <= consumed.
 
     status_t status;
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         // use Binder to get timestamp
         media::AudioTimestampInternal ts;
         mAudioTrack->getTimestamp(&ts, &status);
@@ -3294,7 +3295,7 @@
         ALOGV_IF(status != WOULD_BLOCK, "%s(%d): getTimestamp error:%#x", __func__, mPortId, status);
         return status;
     }
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         if (isOffloaded_l() && (mState == STATE_PAUSED || mState == STATE_PAUSED_STOPPING)) {
             // use cached paused position in case another offloaded track is running.
             timestamp.mPosition = mPausedPosition;
@@ -3740,7 +3741,7 @@
     // This is conservatively figured - if we encounter an unexpected error
     // then we will not wait.
     bool wait = false;
-    if (isOffloadedOrDirect_l()) {
+    if (isAfTrackOffloadedOrDirect_l()) {
         AudioTimestamp ts;
         status_t status = getTimestamp_l(ts);
         if (status == WOULD_BLOCK) {
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 4bd12b8..515e708 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -116,6 +116,8 @@
             legacy2aidl_audio_channel_mask_t_AudioChannelLayout(afChannelMask, false /*isInput*/));
     aidl.afFormat = VALUE_OR_RETURN(
             legacy2aidl_audio_format_t_AudioFormatDescription(afFormat));
+    aidl.afTrackFlags = VALUE_OR_RETURN(
+            legacy2aidl_audio_output_flags_t_int32_t_mask(afTrackFlags));
     aidl.outputId = VALUE_OR_RETURN(legacy2aidl_audio_io_handle_t_int32_t(outputId));
     aidl.portId = VALUE_OR_RETURN(legacy2aidl_audio_port_handle_t_int32_t(portId));
     aidl.audioTrack = audioTrack;
@@ -144,6 +146,8 @@
                                                                 false /*isInput*/));
     legacy.afFormat = VALUE_OR_RETURN(
             aidl2legacy_AudioFormatDescription_audio_format_t(aidl.afFormat));
+    legacy.afTrackFlags = VALUE_OR_RETURN(
+            aidl2legacy_int32_t_audio_output_flags_t_mask(aidl.afTrackFlags));
     legacy.outputId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_io_handle_t(aidl.outputId));
     legacy.portId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_port_handle_t(aidl.portId));
     legacy.audioTrack = aidl.audioTrack;
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index 42e0bb4..ab60461 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -43,6 +43,7 @@
     AudioChannelLayout afChannelMask;
     AudioFormatDescription afFormat;
     int afLatencyMs;
+    int afTrackFlags;
     /** Interpreted as audio_io_handle_t. */
     int outputId;
     /** Interpreted as audio_port_handle_t. */
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
new file mode 100644
index 0000000..67258d9
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+cc_defaults {
+    name: "libaudioclient_aidl_fuzzer_defaults",
+    static_libs: [
+        "android.hardware.audio.common@7.0-enums",
+        "effect-aidl-cpp",
+        "liblog",
+        "libcgrouprc",
+        "libcgrouprc_format",
+        "libjsoncpp",
+        "libmediametricsservice",
+        "libmedia_helper",
+        "libprocessgroup",
+        "shared-file-region-aidl-cpp",
+        "libfakeservicemanager"
+    ],
+    shared_libs: [
+        "libaudioclient",
+        "libaudioflinger",
+        "libmediautils",
+        "libnblog",
+        "libaudioprocessing",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+        "android.hardware.audio.common-util",
+        "audioclient-types-aidl-cpp",
+        "audioflinger-aidl-cpp",
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "capture_state_listener-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libaudiofoundation",
+        "libaudiomanager",
+        "libaudiopolicy",
+        "libaudioutils",
+        "libdl",
+        "libxml2",
+        "mediametricsservice-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libvndksupport",
+        "libmediametrics",
+        "libfakeservicemanager",
+        "libactivitymanager_aidl",
+        "libheadtracking",
+        "libaudiopolicyservice",
+        "libsensorprivacy",
+        "libaudiopolicymanagerdefault",
+        "libaudiohal",
+        "libhidlbase",
+        "libpermission",
+        "libaudiohal@7.0",
+    ],
+    header_libs: [
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libaudiofoundation_headers",
+        "libmedia_headers",
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "mediautils_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudioflinger",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audioflinger_aidl_fuzzer",
+    srcs: ["audioflinger_aidl_fuzzer.cpp"],
+    defaults: [
+        "libaudioclient_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults"
+    ],
+}
diff --git a/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
new file mode 100644
index 0000000..fac5f53
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/audioflinger_aidl_fuzzer.cpp
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+#include <AudioFlinger.h>
+#include <ISchedulingPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <binder/IActivityManager.h>
+#include <binder/IPermissionController.h>
+#include <binder/IServiceManager.h>
+#include <binder/PermissionController.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <mediautils/SchedulingPolicyService.h>
+#include <sensorprivacy/SensorPrivacyManager.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using android::fuzzService;
+
+static sp<media::IAudioFlingerService> gAudioFlingerService;
+
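+// The classes below are no-op stub implementations registered with the FakeServiceManager so
+// that AudioFlinger and AudioPolicyService can resolve the system services they depend on
+// (scheduling policy, permissions, sensor privacy, activity manager) without leaving the process.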
+class FuzzerSchedulingPolicyService : public BnInterface<ISchedulingPolicyService> {
+    int32_t requestPriority(int32_t /*pid_t*/, int32_t /*tid*/, int32_t /*prio*/, bool /*isForApp*/,
+                            bool /*asynchronous*/) {
+        return 0;
+    }
+
+    int32_t requestCpusetBoost(bool /*enable*/, const sp<IBinder>& /*client*/) { return 0; }
+};
+
+class FuzzerPermissionController : public BnInterface<IPermissionController> {
+  public:
+    bool checkPermission(const String16& /*permission*/, int32_t /*pid*/, int32_t /*uid*/) {
+        return true;
+    }
+    int32_t noteOp(const String16& /*op*/, int32_t /*uid*/, const String16& /*packageName*/) {
+        return 0;
+    }
+    void getPackagesForUid(const uid_t /*uid*/, Vector<String16>& /*packages*/) {}
+    bool isRuntimePermission(const String16& /*permission*/) { return true; }
+    int32_t getPackageUid(const String16& /*package*/, int /*flags*/) { return 0; }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setSensorPrivacy(bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/, int32_t /*sensor*/,
+                                  bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                 int32_t /*sensor*/, bool /*enable*/) override {
+        return Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/,
+                                        const int32_t /*event*/,
+                                        const int32_t /*cutpoint*/,
+                                        const String16& /*callingPackage*/,
+                                        const int32_t uids[],
+                                        size_t /*nUids*/,
+                                        /*out*/ sp<IBinder>& /*observerToken*/) {
+        (void)uids;
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/,
+                              const String16& /*callingPackage*/,
+                              int32_t /*uid*/) override {
+        return NO_ERROR;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/,
+                                   const String16& /*callingPackage*/,
+                                   int32_t /*uid*/) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/, int32_t /*appUid*/,
+                            int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/, int32_t /*appUid*/,
+                          int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/, int32_t /*state*/,
+                                   int32_t /*appUid*/,
+                                   int32_t /*appPid*/) override {
+        return NO_ERROR;
+    }
+};
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    /* Create a FakeServiceManager instance and add required services */
+    sp<FakeServiceManager> fakeServiceManager = new FakeServiceManager();
+    setDefaultServiceManager(fakeServiceManager);
+    ABinderProcess_setThreadPoolMaxThreadCount(0);
+    sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+    fakeServiceManager->addService(String16("activity"), IInterface::asBinder(am));
+
+    sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+    fakeServiceManager->addService(String16("sensor_privacy"),
+                                   IInterface::asBinder(sensorPrivacyManager));
+    sp<FuzzerPermissionController> permissionController = new FuzzerPermissionController();
+    fakeServiceManager->addService(String16("permission"),
+                                   IInterface::asBinder(permissionController));
+
+    sp<FuzzerSchedulingPolicyService> schedulingService = new FuzzerSchedulingPolicyService();
+    fakeServiceManager->addService(String16("scheduling_policy"),
+                                   IInterface::asBinder(schedulingService));
+
+    const auto audioFlingerObj = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlingerObj);
+
+    fakeServiceManager->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                   IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+    fakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                   false /* allowIsolated */,
+                                   IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT);
+
+    sp<IBinder> binder =
+            fakeServiceManager->getService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME));
+    gAudioFlingerService = interface_cast<media::IAudioFlingerService>(binder);
+    return 0;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    if (!gAudioFlingerService) {
+        return 0;
+    }
+
+    fuzzService(media::IAudioFlingerService::asBinder(gAudioFlingerService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 8f8c9dd..0215f3c 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -178,6 +178,7 @@
 
     // helper function to obtain AudioFlinger service handle
     static const sp<IAudioFlinger> get_audio_flinger();
+    static const sp<IAudioFlinger> get_audio_flinger_for_fuzzer();
 
     static float linearToLog(int volume);
     static int logToLinear(float volume);
@@ -876,6 +877,7 @@
     static audio_io_handle_t getOutput(audio_stream_type_t stream);
     static const sp<AudioFlingerClient> getAudioFlingerClient();
     static sp<AudioIoDescriptor> getIoDescriptor(audio_io_handle_t ioHandle);
+    static const sp<IAudioFlinger> getAudioFlingerImpl(bool canStartThreadPool);
 
     // Invokes all registered error callbacks with the given error code.
     static void reportError(status_t err);
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 8f712db..523383f 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1238,6 +1238,11 @@
             bool     isDirect_l() const
                 { return (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0; }
 
+            bool     isAfTrackOffloadedOrDirect_l() const
+                { return isOffloadedOrDirect_l() ||
+                        (mAfTrackFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD|
+                                AUDIO_OUTPUT_FLAG_DIRECT)) != 0; }
+
             // pure pcm data is mixable (which excludes HW_AV_SYNC, with embedded timing)
             bool     isPurePcmData_l() const
                 { return audio_is_linear_pcm(mFormat)
@@ -1295,6 +1300,7 @@
     uint32_t                mAfSampleRate;          // AudioFlinger sample rate
     uint32_t                mAfChannelCount;        // AudioFlinger channel count
     audio_format_t          mAfFormat;              // AudioFlinger format
+    audio_output_flags_t    mAfTrackFlags;          // AudioFlinger track flags
 
     // constant after constructor or set()
     audio_format_t          mFormat;                // as requested by client, not forced to 16-bit
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 3c96862..35a3208 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -119,6 +119,7 @@
         uint32_t afLatencyMs;
         audio_channel_mask_t afChannelMask;
         audio_format_t afFormat;
+        audio_output_flags_t afTrackFlags;
         audio_io_handle_t outputId;
         audio_port_handle_t portId;
         sp<media::IAudioTrack> audioTrack;
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 19d1abc..e6916cc 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -53,7 +53,7 @@
         ASSERT_NE(nullptr, ap);
         ASSERT_EQ(OK, ap->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
                 << "Unable to open Resource";
-        EXPECT_EQ(OK, ap->create()) << "track creation failed";
+        ASSERT_EQ(OK, ap->create()) << "track creation failed";
         sp<OnAudioDeviceUpdateNotifier> cb = sp<OnAudioDeviceUpdateNotifier>::make();
         EXPECT_EQ(OK, ap->getAudioTrackHandle()->addAudioDeviceCallback(cb));
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
@@ -87,7 +87,7 @@
     sp<AudioCapture> capture = sp<AudioCapture>::make(
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, capture);
-    EXPECT_EQ(OK, capture->create()) << "record creation failed";
+    ASSERT_EQ(OK, capture->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, capture->getAudioRecordHandle()->addAudioDeviceCallback(cbCapture));
 
@@ -98,7 +98,7 @@
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
             << "Unable to open Resource";
-    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    ASSERT_EQ(OK, playback->create()) << "track creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
 
@@ -180,7 +180,7 @@
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
             << "Unable to open Resource";
-    EXPECT_EQ(OK, playback->create()) << "track creation failed";
+    ASSERT_EQ(OK, playback->create()) << "track creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbPlayback = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, playback->getAudioTrackHandle()->addAudioDeviceCallback(cbPlayback));
 
@@ -188,7 +188,7 @@
     sp<AudioCapture> captureA = sp<AudioCapture>::make(
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, captureA);
-    EXPECT_EQ(OK, captureA->create()) << "record creation failed";
+    ASSERT_EQ(OK, captureA->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCaptureA = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, captureA->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureA));
 
@@ -199,7 +199,7 @@
             AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
             AUDIO_INPUT_FLAG_NONE, AUDIO_SESSION_ALLOCATE, AudioRecord::TRANSFER_CALLBACK, &attr);
     ASSERT_NE(nullptr, captureB);
-    EXPECT_EQ(OK, captureB->create()) << "record creation failed";
+    ASSERT_EQ(OK, captureB->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCaptureB = sp<OnAudioDeviceUpdateNotifier>::make();
     EXPECT_EQ(OK, captureB->getAudioRecordHandle()->addAudioDeviceCallback(cbCaptureB));
 
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index 2ba1fc3..6834b7d 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -476,35 +476,37 @@
 }
 
 bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
-    if (heifColor == (HeifColorFormat)mOutputColor) {
-        return true;
-    }
-
+    android_pixel_format_t outputColor;
     switch(heifColor) {
         case kHeifColorFormat_RGB565:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
+            outputColor = HAL_PIXEL_FORMAT_RGB_565;
             break;
         }
         case kHeifColorFormat_RGBA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_8888;
             break;
         }
         case kHeifColorFormat_BGRA_8888:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
+            outputColor = HAL_PIXEL_FORMAT_BGRA_8888;
             break;
         }
         case kHeifColorFormat_RGBA_1010102:
         {
-            mOutputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
+            outputColor = HAL_PIXEL_FORMAT_RGBA_1010102;
             break;
         }
         default:
             ALOGE("Unsupported output color format %d", heifColor);
             return false;
     }
+    if (outputColor == mOutputColor) {
+        return true;
+    }
+
+    mOutputColor = outputColor;
 
     if (mFrameDecoded) {
         return reinit(nullptr);
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index c43ef66..f498453 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -38,16 +38,10 @@
     Mutex::Autolock _l(sServiceLock);
     if (sMediaPlayerService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
-        sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("Media player service not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
-
+        sp<IBinder> binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 2ae76b3..40fd022 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -41,14 +41,10 @@
     if (sService == 0) {
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(String16("media.player"));
-            if (binder != 0) {
-                break;
-            }
-            ALOGW("MediaPlayerService not published, waiting...");
-            usleep(500000); // 0.5 s
-        } while (true);
+        binder = sm->waitForService(String16("media.player"));
+        if (binder == nullptr) {
+            return nullptr;
+        }
         if (sDeathNotifier == NULL) {
             sDeathNotifier = new DeathNotifier();
         }
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index f80a467..26aa375 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -184,6 +184,7 @@
 #define AMEDIAMETRICS_PROP_PLAYERIID      "playerIId"      // int32 (-1 invalid/unset IID)
 #define AMEDIAMETRICS_PROP_ROUTEDDEVICEID "routedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATE     "sampleRate"     // int32
+#define AMEDIAMETRICS_PROP_SAMPLERATECLIENT "sampleRateClient" // int32
 #define AMEDIAMETRICS_PROP_SAMPLERATEHARDWARE "sampleRateHardware" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDDEVICEID "selectedDeviceId" // int32
 #define AMEDIAMETRICS_PROP_SELECTEDMICDIRECTION "selectedMicDirection" // int32
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 89e9806..434ae00 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -49,7 +49,7 @@
     defaults: ["libnbaio_mono_defaults"],
 }
 
-cc_library_shared {
+cc_library {
     name: "libnbaio",
     defaults: ["libnbaio_mono_defaults"],
     srcs: [
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 967c316..4441121 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/MetaData.h>
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
+#include <camera/StringUtils.h>
 #include <gui/Surface.h>
 #include <utils/String8.h>
 #include <cutils/properties.h>
@@ -146,7 +147,7 @@
 
 status_t CameraSource::isCameraAvailable(
     const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
-    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {
+    int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
         mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
@@ -535,7 +536,7 @@
     status_t err = OK;
 
     if ((err = isCameraAvailable(camera, proxy, cameraId,
-            clientName, clientUid, clientPid)) != OK) {
+            toStdString(clientName), clientUid, clientPid)) != OK) {
         ALOGE("Camera connection could not be established.");
         return err;
     }
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index b5bd975..57937f9 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -16,7 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "FrameDecoder"
-
+#define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
 #include "include/FrameCaptureLayer.h"
 #include "include/HevcUtils.h"
@@ -41,6 +41,7 @@
 #include <media/stagefright/Utils.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
+#include <utils/Trace.h>
 
 namespace android {
 
@@ -340,6 +341,7 @@
 }
 
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
+    ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
     if (err == OK) {
         err = extractInternal();
@@ -713,6 +715,7 @@
     }
     converter.setSrcColorSpace(standard, range, transfer);
     if (converter.isValid()) {
+        ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
         converter.convert(
                 (const uint8_t *)videoFrameBuffer->data(),
                 width, height, stride,
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ea24126..aeee3a8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,7 +31,6 @@
 #include "include/SoftwareRenderer.h"
 
 #include <android/api-level.h>
-#include <android/binder_manager.h>
 #include <android/content/pm/IPackageManagerNative.h>
 #include <android/hardware/cas/native/1.0/IDescrambler.h>
 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -74,7 +73,6 @@
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaCodecList.h>
-#include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/OMXClient.h>
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index e8770ed..fcd17b9 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -259,7 +259,7 @@
     status_t isCameraAvailable(const sp<hardware::ICamera>& camera,
                                const sp<ICameraRecordingProxy>& proxy,
                                int32_t cameraId,
-                               const String16& clientName,
+                               const std::string& clientName,
                                uid_t clientUid,
                                pid_t clientPid);
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 41f2d67..bc83410 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -105,6 +105,7 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mVPSBuf = NULL;
     mSPSBuf = NULL;
     mPPSBuf = NULL;
 
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
new file mode 100644
index 0000000..a2791ba
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -0,0 +1,89 @@
+/*
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at:
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+cc_defaults {
+    name: "libstagefright_rtsp_fuzzer_defaults",
+    shared_libs: [
+        "liblog",
+        "libmedia",
+        "libutils",
+        "libstagefright_foundation",
+    ],
+    static_libs: [
+        "libdatasource",
+        "libstagefright_rtsp",
+    ],
+    header_libs: [
+        "libstagefright_rtsp_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+    },
+}
+
+cc_fuzz {
+    name: "sdploader_fuzzer",
+    srcs: [
+        "sdploader_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ]
+}
+
+cc_fuzz {
+    name: "rtp_writer_fuzzer",
+    srcs: [
+        "rtp_writer_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+    shared_libs:[
+        "libandroid_net",
+        "libbase",
+        "libstagefright",
+        "libcutils",
+    ],
+}
+
+cc_fuzz {
+    name: "packet_source_fuzzer",
+    srcs: [
+        "packet_source_fuzzer.cpp",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
+
+cc_fuzz {
+    name: "rtsp_connection_fuzzer",
+    srcs: [
+        "rtsp_connection_fuzzer.cpp",
+    ],
+    shared_libs: [
+        "libcrypto",
+        "libcutils",
+        "libnetd_client",
+    ],
+    defaults: [
+        "libstagefright_rtsp_fuzzer_defaults",
+    ],
+}
diff --git a/media/libstagefright/rtsp/fuzzer/README.md b/media/libstagefright/rtsp/fuzzer/README.md
new file mode 100644
index 0000000..bc7be29
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/README.md
@@ -0,0 +1,145 @@
+# Fuzzers for libstagefright_rtsp
+
+## Table of contents
++ [sdploader_fuzzer](#SDPLoader)
++ [rtp_writer_fuzzer](#ARTPWriter)
++ [packet_source_fuzzer](#packetSource)
++ [rtsp_connection_fuzzer](#ARTSPConnection)
+
+# <a name="SDPLoader"></a> Fuzzer for SDPLoader
+
+SDPLoader supports the following parameters:
+1. Flag (parameter name: "flags")
+2. URL (parameter name: "url")
+3. Header (parameter name: "headers")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`flags`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`url`| `String` |Value obtained from FuzzedDataProvider|
+|`headers`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) sdploader_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/sdploader_fuzzer/sdploader_fuzzer
+```
+
+# <a name="ARTPWriter"></a> Fuzzer for ARTPWriter
+
+ARTPWriter supports the following parameters:
+1. File descriptor (parameter name: "fd")
+2. Local Ip (parameter name: "localIp")
+3. Local Port (parameter name: "localPort")
+4. Remote Ip (parameter name: "remoteIp")
+5. Remote Port (parameter name: "remotePort")
+6. Sequence No (parameter name: "seqNo")
+7. OpponentID (parameter name: "opponentID")
+8. Bit Rate (parameter name: "bitrate")
+9. kKeyMimeTypeArray (parameter name: "mimeType")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`localIp`| `String` |Value obtained from FuzzedDataProvider|
+|`localPort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`remoteIp`| `String` |Value obtained from FuzzedDataProvider|
+|`remotePort`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`seqNo`| `0`  to  `10000000` |Value obtained from FuzzedDataProvider|
+|`opponentID`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`bitrate`| `UINT32_MIN`  to  `UINT32_MAX` |Value obtained from FuzzedDataProvider|
+|`mimeType`| 0. `MEDIA_MIMETYPE_VIDEO_AVC`<br> 1. `MEDIA_MIMETYPE_VIDEO_HEVC`<br> 2. `MEDIA_MIMETYPE_VIDEO_H263`<br> 3. `MEDIA_MIMETYPE_AUDIO_AMR_NB`<br> 4. `MEDIA_MIMETYPE_AUDIO_AMR_WB`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtp_writer_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtp_writer_fuzzer/rtp_writer_fuzzer
+```
+
+# <a name="packetSource"></a> Fuzzer for  PacketSource
+
+PacketSource supports the following parameters:
+1. Codec (parameter name: "kCodecs")
+2. Format (parameter name: "kFmtp")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kCodecs`| 0. `opus`<br/>1. `ISAC`<br/>2. `VP8`<br/>3. `google-data`<br/>4. `G722`<br/>5. `PCMU`<br/>6. `PCMA`<br/>7. `CN`<br/>8. `telephone-event`<br/>9. `VP9`<br/>10. `red`<br/>11. `ulpfec`<br/>12. `rtx`<br/>13. `H264`<br/>14. `iLBC`<br/>15. `H261`<br/>16. `MPV`<br/>17. `H263`<br/>18. `AMR`<br/>19. `AC3`<br/>20. `G723`<br/>21. `G729A`<br/>22. `MP4V-ES`<br/>23. `H265`<br/>24. `H263-2000`<br/>25. `H263-1998`<br/>26. `AMR-WB`<br/>27. `MP4A-LATM`<br/>28. `MP2T`<br/>29. `mpeg4-generic` |Value obtained from FuzzedDataProvider|
+|`kFmtp`| <br/>0. `br=`<br/>1. `bw=`<br/>2. `ch-aw-recv=`<br/>3. `mode-change-capability=`<br/>4. `max-red =`<br/>5. `octet-align=`<br/>6. `mode-change-capability=`<br/>7. `profile-level-id=`<br/>8. `packetization-mode=`<br/>9. `profile=`<br/>10. `level=` <br/>11. `apt=`<br/>12. `annexb=`<br/>13. `protocol=`<br/>14. `config=`<br/>15. `streamtype=`<br/>16. `mode=`<br/>17. `sizelength=`<br/>18. `indexlength=`<br/>19. `indexdeltalength=`<br/>20. `minptime=`<br/>21. `useinbandfec=`<br/>22. `maxplaybackrate=`<br/>23. `stereo=`<br/>24. `level-asymmetry-allowed=`<br/>25. `max-fs=`<br/>26. `max-fr=`|Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) packet_source_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/packet_source_fuzzer/packet_source_fuzzer
+```
+
+# <a name="ARTSPConnection"></a> Fuzzer for ARTSPConnection
+
+## Design Considerations
+This fuzzer aims at covering ARTSPConnection.cpp. A server is implemented inside the fuzzer; it accepts incoming connections and handles each one in a separate thread. The threads are maintained in a ThreadPool, which limits the maximum number of threads alive at a time. When the fuzzer process ends, all the threads in the ThreadPool are joined to the main thread.
+The inputs to the server are generated using FuzzedDataProvider and stored in the variable 'mFuzzData'. As this variable is shared among multiple threads, a mutex is used to synchronize access to it.
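+
+The sketch below is illustrative only (it is not part of the fuzzer source; the SharedFuzzInput type and its method names are hypothetical). It shows the general pattern of keeping FuzzedDataProvider output in a mutex-guarded member so that the server threads and the fuzzer thread do not race on it.
+```cpp
+#include <mutex>
+#include <string>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+// Hypothetical helper, analogous to how mFuzzData is shared between threads.
+struct SharedFuzzInput {
+    std::mutex lock;
+    std::string data;
+
+    // Fuzzer thread: refill the shared buffer from the fuzzed input.
+    void refill(FuzzedDataProvider& fdp) {
+        std::lock_guard<std::mutex> guard(lock);
+        data = fdp.ConsumeRandomLengthString();
+    }
+
+    // Server thread: take a consistent copy under the same lock.
+    std::string snapshot() {
+        std::lock_guard<std::mutex> guard(lock);
+        return data;
+    }
+};
+```
+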
+### Fuzzer Inputs:
+The inputs generated in the fuzzer using FuzzedDataProvider are randomized as much as possible. Due to constraints in the module source code, the inputs have to be limited and arranged in a specific format.
+
+ARTSPConnection supports the following parameters:
+1. Authentication Type (parameter name: "kAuthType")
+2. FuzzData (parameter name: "mFuzzData")
+3. RequestData (parameter name: "mFuzzRequestData")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kAuthType`| 0.`Basic`<br/>1.`Digest`|Value obtained from FuzzedDataProvider|
+|`mFuzzData`| `String` |Value obtained from FuzzedDataProvider|
+|`mFuzzRequestData`| `String` |Value obtained from FuzzedDataProvider|
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) rtsp_connection_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/rtsp_connection_fuzzer/rtsp_connection_fuzzer
+```
diff --git a/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
new file mode 100644
index 0000000..a3d7535
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/packet_source_fuzzer.cpp
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+#include <media/stagefright/rtsp/APacketSource.h>
+#include <media/stagefright/rtsp/ASessionDescription.h>
+
+using namespace android;
+
+static constexpr int32_t kMinValue = 0;
+static constexpr int32_t kMaxIPAddress = 255;
+static constexpr int32_t kMaxFmt = 255;
+static constexpr int32_t kMinAPICase = 0;
+static constexpr int32_t kMaxPacketSourceAPI = 5;
+static constexpr size_t kMinIndex = 1;
+static constexpr size_t kMaxCodecConfigs = 4;
+
+std::string kCodecs[] = {"opus",        "ISAC",         "VP8",
+                         "google-data", "G722",         "PCMU",
+                         "PCMA",        "CN",           "telephone-event",
+                         "VP9",         "red",          "ulpfec",
+                         "rtx",         "H264",         "iLBC",
+                         "H261",        "MPV",          "H263",
+                         "AMR",         "AC3",          "G723",
+                         "G729A",       "H264",         "MP4V-ES",
+                         "H265",        "H263-2000",    "H263-1998",
+                         "AMR",         "AMR-WB",       "MP4A-LATM",
+                         "MP2T",        "mpeg4-generic"};
+
+std::string kFmtp[] = {"br=",
+                       "bw=",
+                       "ch-aw-recv=",
+                       "mode-change-capability=",
+                       "max-red =",
+                       "octet-align=",
+                       "mode-change-capability=",
+                       "max-red=",
+                       "profile-level-id=",
+                       "packetization-mode=",
+                       "profile=",
+                       "level=",
+                       "apt=",
+                       "annexb=",
+                       "protocol=",
+                       "streamtype=",
+                       "mode=",
+                       "sizelength=",
+                       "indexlength=",
+                       "indexdeltalength=",
+                       "minptime=",
+                       "useinbandfec=",
+                       "maxplaybackrate=",
+                       "stereo=",
+                       "level-asymmetry-allowed=",
+                       "max-fs=",
+                       "max-fr="};
+
+std::string kCodecConfigString[kMaxCodecConfigs][2] = {{"H264", "profile-level-id="},
+                                                       {"MP4A-LATM", "config="},
+                                                       {"MP4V-ES", "config="},
+                                                       {"mpeg4-generic", "mode="}};
+
+class ASessionPacketFuzzer {
+  public:
+    ASessionPacketFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+
+  private:
+    FuzzedDataProvider mFdp;
+};
+
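+// Returns false only when 'codec' is one of the codecs listed in kCodecConfigString and
+// 'format' is not the fmtp key required by that codec; returns true in all other cases.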
+bool checkFormatSupport(const std::string& codec, const std::string& format) {
+    for (int i = 0; i < kMaxCodecConfigs; ++i) {
+        if (codec == kCodecConfigString[i][0]) {
+            if (format == kCodecConfigString[i][1]) {
+                return true;
+            } else {
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+void ASessionPacketFuzzer::process() {
+    AString inputString;
+    const sp<ASessionDescription> sessionPacket = sp<ASessionDescription>::make();
+    std::string codec = mFdp.PickValueInArray(kCodecs);
+    std::string ipAddress =
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." +
+            std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxIPAddress)) + "." + "0";
+    std::string format = mFdp.PickValueInArray(kFmtp);
+    std::string fmptStr = format + std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt)) +
+                          ";" + mFdp.PickValueInArray(kFmtp) +
+                          std::to_string(mFdp.ConsumeIntegralInRange(kMinValue, kMaxFmt));
+    sessionPacket->SDPStringFactory(
+            inputString, ipAddress.c_str() /* ip */, mFdp.ConsumeBool() /* isAudio */,
+            mFdp.ConsumeIntegral<unsigned int>() /* port */,
+            mFdp.ConsumeIntegral<unsigned int>() /* payloadType */,
+            mFdp.ConsumeIntegral<unsigned int>() /* as */, codec.c_str(), /* codec */
+            fmptStr.c_str() /* fmtp */, mFdp.ConsumeIntegral<int32_t>() /* width */,
+            mFdp.ConsumeIntegral<int32_t>() /* height */,
+            mFdp.ConsumeIntegral<int32_t>() /* cvoExtMap */);
+    sessionPacket->setTo(inputString.c_str(), inputString.size());
+    size_t trackSize = sessionPacket->countTracks();
+    AString desc = nullptr;
+    while (mFdp.remaining_bytes()) {
+        int32_t packetSourceAPI =
+                mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxPacketSourceAPI);
+        switch (packetSourceAPI) {
+            case 0: {
+                unsigned long payload = 0;
+                AString params = nullptr;
+                sessionPacket->getFormatType(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                             &payload, &desc, &params);
+                break;
+            }
+            case 1: {
+                int32_t width, height;
+                unsigned long payload = mFdp.ConsumeIntegral<unsigned long>();
+                sessionPacket->getDimensions(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                             payload, &width, &height);
+                break;
+            }
+            case 2: {
+                int32_t cvoExtMap = mFdp.ConsumeIntegral<int32_t>();
+                sessionPacket->getCvoExtMap(mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1),
+                                            &cvoExtMap);
+                break;
+            }
+            case 3: {
+                int64_t durationUs = mFdp.ConsumeIntegral<int64_t>();
+                sessionPacket->getDurationUs(&durationUs);
+                break;
+            }
+            case 4: {
+                int32_t timeScale, numChannels;
+                if (desc != nullptr) {
+                    sessionPacket->ParseFormatDesc(desc.c_str(), &timeScale, &numChannels);
+                }
+                break;
+            }
+            case 5: {
+                if (checkFormatSupport(codec, format)) {
+                    sp<APacketSource> packetSource = sp<APacketSource>::make(
+                            sessionPacket, mFdp.ConsumeIntegralInRange(kMinIndex, trackSize - 1));
+                }
+                break;
+            }
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ASessionPacketFuzzer packetSourceFuzzer(data, size);
+    packetSourceFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
new file mode 100644
index 0000000..8d9f923
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtp_writer_fuzzer.cpp
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/ARTPWriter.h>
+
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 65536;
+constexpr int32_t kMaxTime = 1000;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kAMRNBFrameSizes[] = {13, 14, 16, 18, 20, 21, 27, 32};
+constexpr int32_t kAMRWBFrameSizes[] = {18, 24, 33, 37, 41, 47, 51, 59, 61};
+constexpr int32_t kAMRIndexOffset = 8;
+
+using namespace android;
+
+const char* kKeyMimeTypeArray[] = {MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_HEVC,
+                                   MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AMR_NB,
+                                   MEDIA_MIMETYPE_AUDIO_AMR_WB};
+
+struct TestMediaSource : public MediaSource {
+  public:
+    TestMediaSource(FuzzedDataProvider& mFdp) : mTestMetaData(new MetaData) {
+        int32_t vectorSize = 0;
+        mAllowRead = mFdp.ConsumeBool();
+        mKeySps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyVps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyPps = mFdp.ConsumeIntegral<int32_t>();
+        mKeyTime = mFdp.ConsumeIntegralInRange<int64_t>(kMinSize, kMaxTime);
+
+        mMimeType = mFdp.PickValueInArray(kKeyMimeTypeArray);
+        mTestMetaData->setCString(kKeyMIMEType, mMimeType);
+        if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_NB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRNBFrameSizes) - 1);
+            vectorSize = kAMRNBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB) {
+            int32_t index =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, std::size(kAMRWBFrameSizes) - 1);
+            vectorSize = kAMRWBFrameSizes[index];
+            mData.push_back(kAMRIndexOffset * index);
+        } else if (mMimeType == MEDIA_MIMETYPE_VIDEO_H263) {
+            // Required format for H263 media data
+            mData.push_back(0);
+            mData.push_back(0);
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        } else {
+            vectorSize = mFdp.ConsumeIntegralInRange<int32_t>(kMinSize, kMaxSize);
+        }
+        for (size_t idx = mData.size(); idx < vectorSize; ++idx) {
+            mData.push_back(mFdp.ConsumeIntegral<uint8_t>());
+        }
+    }
+    virtual status_t start(MetaData* /*params*/) { return OK; }
+    virtual status_t stop() { return OK; }
+    virtual sp<MetaData> getFormat() { return mTestMetaData; }
+    virtual status_t read(MediaBufferBase** buffer, const ReadOptions* /*options*/) {
+        if (!mAllowRead) {
+            return -1;
+        }
+        *buffer = new MediaBuffer(mData.data() /*data*/, mData.size() /*size*/);
+        if (mKeySps) {
+            (*buffer)->meta_data().setInt32(kKeySps, mKeySps);
+        }
+        if (mKeyVps) {
+            (*buffer)->meta_data().setInt32(kKeyVps, mKeyVps);
+        }
+        if (mKeyPps) {
+            (*buffer)->meta_data().setInt32(kKeyPps, mKeyPps);
+        }
+        (*buffer)->meta_data().setInt64(kKeyTime, mKeyTime);
+        return OK;
+    }
+
+  private:
+    int32_t mKeySps;
+    int32_t mKeyVps;
+    int32_t mKeyPps;
+    int64_t mKeyTime;
+    bool mAllowRead;
+    const char* mMimeType;
+    sp<MetaData> mTestMetaData;
+    std::vector<uint8_t> mData;
+};
+
+class ARTPWriterFuzzer {
+  public:
+    ARTPWriterFuzzer(const uint8_t* data, size_t size)
+        : mDataSourceFd(memfd_create("InputFile", MFD_ALLOW_SEALING)), mFdp(data, size) {}
+    ~ARTPWriterFuzzer() { close(mDataSourceFd); }
+    void process();
+
+  private:
+    void createARTPWriter();
+    const int32_t mDataSourceFd;
+    FuzzedDataProvider mFdp;
+    sp<ARTPWriter> mArtpWriter;
+};
+
+void ARTPWriterFuzzer::createARTPWriter() {
+    String8 localIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    String8 remoteIp = String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+    mArtpWriter = sp<ARTPWriter>::make(
+            mDataSourceFd, localIp, mFdp.ConsumeIntegral<uint16_t>() /* localPort */, remoteIp,
+            mFdp.ConsumeIntegral<uint16_t>() /* remotePort */,
+            mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize) /* seqNo */);
+}
+
+void ARTPWriterFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        mArtpWriter = sp<ARTPWriter>::make(mDataSourceFd);
+        if (mArtpWriter->getSequenceNum() > kMaxSize) {
+            createARTPWriter();
+        }
+    } else {
+        createARTPWriter();
+    }
+
+    mArtpWriter->addSource(sp<TestMediaSource>::make(mFdp) /* source */);
+
+    while (mFdp.remaining_bytes()) {
+        auto invokeRTPWriterFuzzer = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<MetaData> metaData = sp<MetaData>::make();
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeySelfID, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyPayloadType, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpExtMap, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpCvoDegrees, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt32(kKeyRtpDscp, mFdp.ConsumeIntegral<int32_t>());
+                    }
+                    if (mFdp.ConsumeBool()) {
+                        metaData->setInt64(kKeySocketNetwork, mFdp.ConsumeIntegral<int64_t>());
+                    }
+                    mArtpWriter->start(metaData.get() /*param*/);
+                },
+                [&]() {
+                    mArtpWriter->setTMMBNInfo(mFdp.ConsumeIntegral<uint32_t>() /* opponentID */,
+                                              mFdp.ConsumeIntegral<uint32_t>() /* bitrate */);
+                },
+                [&]() { mArtpWriter->stop(); },
+                [&]() {
+                    mArtpWriter->updateCVODegrees(mFdp.ConsumeIntegral<int32_t>() /* cvoDegrees */);
+                },
+                [&]() {
+                    mArtpWriter->updatePayloadType(
+                            mFdp.ConsumeIntegral<int32_t>() /* payloadType */);
+                },
+
+        });
+        invokeRTPWriterFuzzer();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    ARTPWriterFuzzer artpWriterFuzzer(data, size);
+    artpWriterFuzzer.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
new file mode 100644
index 0000000..51c423e
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/rtsp_connection_fuzzer.cpp
@@ -0,0 +1,397 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <arpa/inet.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/rtsp/ARTSPConnection.h>
+#include <thread>
+
+using namespace android;
+
+const std::string kAuthType[] = {"Basic", "Digest"};
+const std::string kTab = "\t";
+const std::string kCSeq = "CSeq: ";
+const std::string kSpace = " ";
+const std::string kNewLine = "\n";
+const std::string kBinaryHeader = "$";
+const std::string kNonce = " nonce=\"\"";
+const std::string kRealm = " realm=\"\"";
+const std::string kHeaderBoundary = "\r\n\r\n";
+const std::string kContentLength = "content-length: ";
+const std::string kDefaultRequestValue = "INVALID_FORMAT";
+const std::string kUrlPrefix = "rtsp://root:pass@127.0.0.1:";
+const std::string kRequestMarker = "REQUEST_SENT";
+const std::string kQuitResponse = "\n\n\n\n";
+const std::string kRTSPVersion = "RTSP/1.0";
+const std::string kValidResponse = kRTSPVersion + " 200 \n";
+const std::string kAuthString = kRTSPVersion + " 401 \nwww-authenticate: ";
+constexpr char kNullValue = '\0';
+constexpr char kDefaultValue = '0';
+constexpr int32_t kWhat = 'resp';
+constexpr int32_t kMinPort = 100;
+constexpr int32_t kMaxPort = 999;
+constexpr int32_t kMinASCIIValue = 32;
+constexpr int32_t kMaxASCIIValue = 126;
+constexpr int32_t kMinContentLength = 0;
+constexpr int32_t kMaxContentLength = 1000;
+constexpr int32_t kBinaryVectorSize = 3;
+constexpr int32_t kDefaultCseqValue = 1;
+constexpr int32_t kBufferSize = 1024;
+constexpr int32_t kMaxLoopRuns = 5;
+constexpr int32_t kPort = 554;
+constexpr int32_t kMaxBytes = 128;
+constexpr int32_t kMaxThreads = 1024;
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction)
+        : mSignalEosFunction(std::move(signalEosFunction)) {}
+    ~FuzzAHandler() = default;
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhat: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+class RTSPConnectionFuzzer {
+  public:
+    RTSPConnectionFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    ~RTSPConnectionFuzzer() {
+        // wait for all the threads to join the main thread
+        for (auto& thread : mThreadPool) {
+            if (thread.joinable()) {
+                thread.join();
+            }
+        }
+        close(mServerFd);
+    }
+    void process();
+
+  private:
+    void signalEos();
+    void startServer();
+    void createFuzzData();
+    void acceptConnection();
+    void handleConnection(int32_t);
+    void handleClientResponse(int32_t);
+    void sendValidResponse(int32_t, int32_t);
+    int32_t checkSocket(int32_t);
+    size_t generateBinaryDataSize(std::string);
+    bool checkValidRequestData(const AString&);
+    bool mEosReached = false;
+    bool mServerFailure = false;
+    bool mNotifyResponseListener = false;
+    int32_t mServerFd;
+    std::string mFuzzData = "";
+    std::string mFuzzRequestData = "";
+    std::string mRequestData = kDefaultRequestValue;
+    std::mutex mFuzzDataMutex;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    std::vector<std::thread> mThreadPool;
+    FuzzedDataProvider mFdp;
+};
+
+size_t RTSPConnectionFuzzer::generateBinaryDataSize(std::string values) {
+    // compute the binary data size the same way ARTSPConnection.cpp does
+    uint8_t x = values[0];
+    uint8_t y = values[1];
+    return x << 8 | y;
+}
+
+bool RTSPConnectionFuzzer::checkValidRequestData(const AString& request) {
+    if (request.find(kHeaderBoundary.c_str()) <= 0) {
+        return false;
+    }
+    ssize_t space = request.find(kSpace.c_str());
+    if (space <= 0) {
+        return false;
+    }
+    if (request.find(kSpace.c_str(), space + 1) <= 0) {
+        return false;
+    }
+    return true;
+}
+
+void RTSPConnectionFuzzer::createFuzzData() {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    mFuzzData = "";
+    mFuzzRequestData = "";
+    int32_t contentLength = 0;
+    if (mFdp.ConsumeBool()) {
+        if (mFdp.ConsumeBool()) {
+            // if we want to handle server request
+            mFuzzData.append(kSpace + kSpace + kRTSPVersion);
+        } else {
+            // if we want to notify response listener
+            mFuzzData.append(
+                    kRTSPVersion + kSpace +
+                    std::to_string(mFdp.ConsumeIntegralInRange<uint16_t>(kMinPort, kMaxPort)) +
+                    kSpace);
+        }
+        mFuzzData.append(kNewLine);
+        if (mFdp.ConsumeBool()) {
+            contentLength =
+                    mFdp.ConsumeIntegralInRange<int32_t>(kMinContentLength, kMaxContentLength);
+            mFuzzData.append(kContentLength + std::to_string(contentLength) + kNewLine);
+            if (mFdp.ConsumeBool()) {
+                mFdp.ConsumeBool() ? mFuzzData.append(kSpace + kNewLine)
+                                   : mFuzzData.append(kTab + kNewLine);
+            }
+        }
+        // new line to break out of infinite for loop
+        mFuzzData.append(kNewLine);
+        if (contentLength) {
+            std::string contentData = mFdp.ConsumeBytesAsString(contentLength);
+            contentData.resize(contentLength, kDefaultValue);
+            mFuzzData.append(contentData);
+        }
+    } else {
+        // for binary data
+        std::string randomValues(kBinaryVectorSize, kNullValue);
+        for (size_t idx = 0; idx < kBinaryVectorSize; ++idx) {
+            randomValues[idx] =
+                    (char)mFdp.ConsumeIntegralInRange<uint8_t>(kMinASCIIValue, kMaxASCIIValue);
+        }
+        size_t binaryDataSize = generateBinaryDataSize(randomValues);
+        std::string data = mFdp.ConsumeBytesAsString(binaryDataSize);
+        data.resize(binaryDataSize, kDefaultValue);
+        mFuzzData.append(kBinaryHeader + randomValues + data);
+    }
+    if (mFdp.ConsumeBool()) {
+        mRequestData = mFdp.ConsumeRandomLengthString(kMaxBytes) + kSpace + kSpace +
+                       kHeaderBoundary + mFdp.ConsumeRandomLengthString(kMaxBytes);
+        // Check if Request data is valid
+        if (checkValidRequestData(mRequestData.c_str())) {
+            if (mFdp.ConsumeBool()) {
+                if (mFdp.ConsumeBool()) {
+                    // if we want to handle server request
+                    mFuzzRequestData.append(kSpace + kSpace + kRTSPVersion + kNewLine);
+                } else {
+                    // if we want to add authentication headers
+                    mNotifyResponseListener = true;
+                    mFuzzRequestData.append(kAuthString);
+                    if (mFdp.ConsumeBool()) {
+                        // for Authentication type: Basic
+                        mFuzzRequestData.append(kAuthType[0]);
+                    } else {
+                        // for Authentication type: Digest
+                        mFuzzRequestData.append(kAuthType[1]);
+                        mFuzzRequestData.append(kNonce);
+                        mFuzzRequestData.append(kRealm);
+                    }
+                    mFuzzRequestData.append(kNewLine);
+                }
+            } else {
+                mNotifyResponseListener = false;
+                mFuzzRequestData.append(kValidResponse);
+            }
+        } else {
+            mRequestData = kDefaultRequestValue;
+        }
+    } else {
+        mRequestData = kDefaultRequestValue;
+        mFuzzData.append(kNewLine);
+    }
+}
+
+void RTSPConnectionFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_all();
+    return;
+}
+
+int32_t RTSPConnectionFuzzer::checkSocket(int32_t newSocket) {
+    struct timeval tv;
+    tv.tv_sec = 1;
+    tv.tv_usec = 0;
+
+    fd_set rs;
+    FD_ZERO(&rs);
+    FD_SET(newSocket, &rs);
+
+    return select(newSocket + 1, &rs, nullptr, nullptr, &tv);
+}
+
+void RTSPConnectionFuzzer::sendValidResponse(int32_t newSocket, int32_t cseq = -1) {
+    std::string validResponse = kValidResponse;
+    if (cseq != -1) {
+        validResponse.append(kCSeq + std::to_string(cseq));
+        validResponse.append(kNewLine + kNewLine);
+    } else {
+        validResponse.append(kNewLine);
+    }
+    send(newSocket, validResponse.c_str(), validResponse.size(), 0);
+}
+
+void RTSPConnectionFuzzer::handleClientResponse(int32_t newSocket) {
+    char buffer[kBufferSize] = {0};
+    if (checkSocket(newSocket) == 1) {
+        read(newSocket, buffer, kBufferSize);
+    }
+}
+
+void RTSPConnectionFuzzer::handleConnection(int32_t newSocket) {
+    std::unique_lock fuzzLock(mFuzzDataMutex);
+    send(newSocket, mFuzzData.c_str(), mFuzzData.size(), 0);
+    if (mFuzzData[0] == kSpace[0]) {
+        handleClientResponse(newSocket);
+    }
+
+    if (mFuzzRequestData != "") {
+        char buffer[kBufferSize] = {0};
+        if (checkSocket(newSocket) == 1 && recv(newSocket, buffer, kBufferSize, MSG_DONTWAIT) > 0) {
+            // Extract the 'CSeq' value present at the end of the header
+            std::string clientResponse(buffer);
+            std::string header = clientResponse.substr(0, clientResponse.find(kHeaderBoundary));
+            char cseq = header[header.rfind(kCSeq) + kCSeq.length()];
+            int32_t cseqValue = cseq ? cseq - '0' : kDefaultCseqValue;
+            std::string response = mFuzzRequestData;
+            response.append(kCSeq + std::to_string(cseqValue));
+            response.append(kNewLine + kNewLine);
+            send(newSocket, response.data(), response.length(), 0);
+
+            if (!mNotifyResponseListener) {
+                char buffer[kBufferSize] = {0};
+                if (checkSocket(newSocket) == 1) {
+                    if (recv(newSocket, buffer, kBufferSize, MSG_DONTWAIT) > 0) {
+                        // Extract the 'CSeq' value present at the end of the header
+                        std::string clientResponse(buffer);
+                        std::string header =
+                                clientResponse.substr(0, clientResponse.find(kHeaderBoundary));
+                        char cseq = header[header.rfind(kCSeq) + kCSeq.length()];
+                        int32_t cseqValue = cseq ? cseq - '0' : kDefaultCseqValue;
+                        sendValidResponse(newSocket, cseqValue);
+                    } else {
+                        sendValidResponse(newSocket);
+                    }
+                }
+            }
+        } else {
+            // If no data to read, then send a valid response
+            // to release the mutex lock in fuzzer
+            sendValidResponse(newSocket);
+        }
+    }
+    send(newSocket, kQuitResponse.c_str(), kQuitResponse.size(), 0);
+}
+
+void RTSPConnectionFuzzer::startServer() {
+    signal(SIGPIPE, SIG_IGN);
+    mServerFd = socket(AF_INET, SOCK_STREAM | SOCK_CLOEXEC, 0);
+    struct sockaddr_in serverAddress;
+    serverAddress.sin_family = AF_INET;
+    serverAddress.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+    serverAddress.sin_port = htons(kPort);
+
+    // Get rid of "Address in use" error
+    int32_t opt = 1;
+    if (setsockopt(mServerFd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt))) {
+        mServerFailure = true;
+    }
+
+    // Bind the socket and set for listening.
+    if (bind(mServerFd, (struct sockaddr*)(&serverAddress), sizeof(serverAddress)) < 0) {
+        mServerFailure = true;
+    }
+
+    if (listen(mServerFd, 5) < 0) {
+        mServerFailure = true;
+    }
+}
+
+void RTSPConnectionFuzzer::acceptConnection() {
+    int32_t clientFd = accept4(mServerFd, nullptr, nullptr, SOCK_CLOEXEC);
+    handleConnection(clientFd);
+    close(clientFd);
+}
+
+void RTSPConnectionFuzzer::process() {
+    startServer();
+    if (mServerFailure) {
+        return;
+    }
+    sp<ALooper> looper = sp<ALooper>::make();
+    sp<FuzzAHandler> handler =
+            sp<FuzzAHandler>::make(std::bind(&RTSPConnectionFuzzer::signalEos, this));
+    sp<ARTSPConnection> rtspConnection =
+            sp<ARTSPConnection>::make(mFdp.ConsumeBool(), mFdp.ConsumeIntegral<uint64_t>());
+    looper->start();
+    looper->registerHandler(rtspConnection);
+    looper->registerHandler(handler);
+    sp<AMessage> replymsg = sp<AMessage>::make(kWhat, handler);
+    std::string url = kUrlPrefix + std::to_string(kPort) + "/";
+
+    while (mFdp.remaining_bytes() && mThreadPool.size() < kMaxThreads) {
+        createFuzzData();
+        mThreadPool.push_back(std::thread(&RTSPConnectionFuzzer::acceptConnection, this));
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->observeBinaryData(replymsg);
+        }
+
+        {
+            rtspConnection->connect(url.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mRequestData != kDefaultRequestValue) {
+            rtspConnection->sendRequest(mRequestData.c_str(), replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+
+        if (mFdp.ConsumeBool()) {
+            rtspConnection->disconnect(replymsg);
+            std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+            mConditionalVariable.wait(waitForMsgPostComplete, [this] {
+                if (mEosReached == true) {
+                    mEosReached = false;
+                    return true;
+                }
+                return mEosReached;
+            });
+        }
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    RTSPConnectionFuzzer rtspFuzz(data, size);
+    rtspFuzz.process();
+    return 0;
+}
diff --git a/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
new file mode 100644
index 0000000..748e5b6
--- /dev/null
+++ b/media/libstagefright/rtsp/fuzzer/sdploader_fuzzer.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <datasource/HTTPBase.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/MediaHTTPConnection.h>
+#include <media/MediaHTTPService.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/rtsp/SDPLoader.h>
+
+using namespace android;
+
+constexpr int32_t kMinCapacity = 0;
+constexpr int32_t kMaxCapacity = 1000;
+constexpr int32_t kMaxStringLength = 20;
+constexpr int32_t kMaxBytes = 128;
+enum { kWhatLoad = 'load' };
+
+struct FuzzAHandler : public AHandler {
+  public:
+    FuzzAHandler(std::function<void()> signalEosFunction) : mSignalEosFunction(signalEosFunction) {}
+
+  protected:
+    void onMessageReceived(const sp<AMessage>& msg) override {
+        switch (msg->what()) {
+            case kWhatLoad: {
+                mSignalEosFunction();
+                break;
+            }
+        }
+        return;
+    }
+
+  private:
+    std::function<void()> mSignalEosFunction;
+};
+
+struct FuzzMediaHTTPConnection : public MediaHTTPConnection {
+  public:
+    FuzzMediaHTTPConnection(FuzzedDataProvider* fdp) : mFdp(fdp) {
+        mSize = mFdp->ConsumeIntegralInRange(kMinCapacity, kMaxCapacity);
+        mData = mFdp->ConsumeBytes<uint8_t>(mSize);
+        mSize = mData.size();
+    }
+    virtual bool connect(const char* /* uri */,
+                         const KeyedVector<String8, String8>* /* headers */) {
+        return mFdp->ConsumeBool();
+    }
+    virtual void disconnect() { return; }
+    virtual ssize_t readAt(off64_t offset, void* data, size_t size) {
+        if ((size + offset <= mData.size()) && (offset >= 0)) {
+            memcpy(data, mData.data() + offset, size);
+            return size;
+        }
+        return 0;
+    }
+    virtual off64_t getSize() { return mSize; }
+    virtual status_t getMIMEType(String8* /*mimeType*/) {return mFdp->ConsumeIntegral<status_t>();}
+    virtual status_t getUri(String8* /*uri*/) {return mFdp->ConsumeIntegral<status_t>();}
+
+  private:
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mData;
+    size_t mSize = 0;
+};
+
+struct FuzzMediaHTTPService : public MediaHTTPService {
+  public:
+    FuzzMediaHTTPService(FuzzedDataProvider* fdp) : mFdp(fdp) {}
+    virtual sp<MediaHTTPConnection> makeHTTPConnection() {
+        mediaHTTPConnection = sp<FuzzMediaHTTPConnection>::make(mFdp);
+        return mediaHTTPConnection;
+    }
+
+  private:
+    sp<FuzzMediaHTTPConnection> mediaHTTPConnection = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+};
+
+class SDPLoaderFuzzer {
+  public:
+    SDPLoaderFuzzer(const uint8_t* data, size_t size) : mFdp(data, size) {}
+    void process();
+
+  private:
+    void signalEos();
+
+    bool mEosReached = false;
+    std::mutex mMsgPostCompleteMutex;
+    std::condition_variable mConditionalVariable;
+    FuzzedDataProvider mFdp;
+};
+
+void SDPLoaderFuzzer::signalEos() {
+    mEosReached = true;
+    mConditionalVariable.notify_one();
+    return;
+}
+
+void SDPLoaderFuzzer::process() {
+    sp<FuzzAHandler> handler = sp<FuzzAHandler>::make(std::bind(&SDPLoaderFuzzer::signalEos, this));
+    sp<ALooper> looper = sp<ALooper>::make();
+    looper->start();
+    looper->registerHandler(handler);
+    const sp<AMessage> notify = sp<AMessage>::make(kWhatLoad, handler);
+    sp<SDPLoader> sdpLoader =
+            sp<SDPLoader>::make(notify, mFdp.ConsumeIntegral<uint32_t>() /* flags */,
+                                sp<FuzzMediaHTTPService>::make(&mFdp) /* httpService */);
+
+    KeyedVector<String8, String8> headers;
+    for (size_t idx = 0; idx < mFdp.ConsumeIntegralInRange<size_t>(kMinCapacity, kMaxCapacity);
+         ++idx) {
+        headers.add(String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* key */,
+                    String8(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str()) /* value */);
+    }
+
+    sdpLoader->load(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str() /* url */, &headers);
+
+    std::unique_lock waitForMsgPostComplete(mMsgPostCompleteMutex);
+    mConditionalVariable.wait(waitForMsgPostComplete, [this] { return mEosReached; });
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    SDPLoaderFuzzer sdpLoaderFuzzer(data, size);
+    sdpLoaderFuzzer.process();
+    return 0;
+}
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index 38cf29d..184e4f4 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -2007,7 +2007,7 @@
             uint8_t mhac_header[mhac_header_size];
             off64_t data_offset = *offset;
 
-            if (chunk_size < sizeof(mhac_header)) {
+            if (mLastTrack == NULL || chunk_size < sizeof(mhac_header)) {
                 return ERROR_MALFORMED;
             }
 
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index 5dbcd08..6068d68 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -73,14 +73,14 @@
 }
 
 bool MtpDataPacket::getUInt8(uint8_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || (mOffset >= mBufferSize))
         return false;
     value = mBuffer[mOffset++];
     return true;
 }
 
 bool MtpDataPacket::getUInt16(uint16_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+1) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
@@ -89,7 +89,7 @@
 }
 
 bool MtpDataPacket::getUInt32(uint32_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+3) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
@@ -99,7 +99,7 @@
 }
 
 bool MtpDataPacket::getUInt64(uint64_t& value) {
-    if (mPacketSize - mOffset < sizeof(value))
+    if ((mPacketSize - mOffset < sizeof(value)) || ((mOffset+7) >= mBufferSize))
         return false;
     int offset = mOffset;
     value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 2ffd775..ef8c9aa 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -297,6 +297,10 @@
 }
 
 void MtpFfsHandle::close() {
+    auto timeout = std::chrono::seconds(2);
+    std::unique_lock lk(m);
+    cv.wait_for(lk, timeout, [this] { return child_threads == 0; });
+
     io_destroy(mCtx);
     closeEndpoints();
     closeConfig();
@@ -669,6 +673,11 @@
     char *temp = new char[me.length];
     memcpy(temp, me.data, me.length);
     me.data = temp;
+
+    std::unique_lock lk(m);
+    child_threads++;
+    lk.unlock();
+
     std::thread t([this, me]() { return this->doSendEvent(me); });
     t.detach();
     return 0;
@@ -680,6 +689,11 @@
     if (static_cast<unsigned>(ret) != length)
         PLOG(ERROR) << "Mtp error sending event thread!";
     delete[] reinterpret_cast<char*>(me.data);
+
+    std::unique_lock lk(m);
+    child_threads--;
+    lk.unlock();
+    cv.notify_one();
 }
 
 } // namespace android
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index e552e03..51cdef0 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -60,6 +60,10 @@
     bool mCanceled;
     bool mBatchCancel;
 
+    std::mutex m;
+    std::condition_variable cv;
+    std::atomic<int> child_threads{0};
+
     android::base::unique_fd mControl;
     // "in" from the host's perspective => sink for mtp server
     android::base::unique_fd mBulkIn;
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index f069a83..5faaac2 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -92,24 +92,46 @@
 }
 
 uint16_t MtpPacket::getUInt16(int offset) const {
-    return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+    if ((unsigned long)(offset+2) <= mBufferSize) {
+        return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+    }
+    else {
+        ALOGE("offset for buffer read is greater than buffer size!");
+        abort();
+    }
 }
 
 uint32_t MtpPacket::getUInt32(int offset) const {
-    return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
-           ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+    if ((unsigned long)(offset+4) <= mBufferSize) {
+        return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+               ((uint32_t)mBuffer[offset + 1] << 8)  | (uint32_t)mBuffer[offset];
+    }
+    else {
+        ALOGE("offset for buffer read is greater than buffer size!");
+        abort();
+    }
 }
 
 void MtpPacket::putUInt16(int offset, uint16_t value) {
-    mBuffer[offset++] = (uint8_t)(value & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    if ((unsigned long)(offset+2) <= mBufferSize) {
+        mBuffer[offset++] = (uint8_t)(value & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+    }
+    else {
+        ALOGE("offset for buffer write is greater than buffer size!");
+    }
 }
 
 void MtpPacket::putUInt32(int offset, uint32_t value) {
-    mBuffer[offset++] = (uint8_t)(value & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
-    mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+    if ((unsigned long)(offset+4) <= mBufferSize) {
+        mBuffer[offset++] = (uint8_t)(value & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+        mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+    }
+    else {
+        ALOGE("offset for buffer write is greater than buffer size!");
+    }
 }
 
 uint16_t MtpPacket::getContainerCode() const {
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 067c8f4..0f7d98b 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -570,6 +570,9 @@
     }
 }
 
+// This LL-NDK API is now deprecated. New devices no longer ship the token
+// manager service, so createHalToken will return false and this method
+// will return AMEDIA_ERROR_UNKNOWN on those devices.
 media_status_t AImageReader::getWindowNativeHandle(native_handle **handle) {
     if (mWindowHandle != nullptr) {
         *handle = mWindowHandle;
diff --git a/media/ndk/fuzzer/Android.bp b/media/ndk/fuzzer/Android.bp
index a3d6a96..ba92b19 100644
--- a/media/ndk/fuzzer/Android.bp
+++ b/media/ndk/fuzzer/Android.bp
@@ -56,6 +56,14 @@
             "android-media-fuzzing-reports@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libmediandk library",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -63,6 +71,11 @@
     name: "ndk_crypto_fuzzer",
     srcs: ["ndk_crypto_fuzzer.cpp"],
     defaults: ["libmediandk_fuzzer_defaults"],
+    fuzz_config: {
+        libfuzzer_options: [
+            "max_len=10000",
+        ],
+    },
 }
 
 cc_fuzz {
@@ -116,3 +129,16 @@
     header_libs: ["libnativewindow_headers",],
     defaults: ["libmediandk_fuzzer_defaults",],
 }
+
+cc_fuzz {
+    name: "ndk_async_codec_fuzzer",
+    srcs: [
+           "ndk_async_codec_fuzzer.cpp",
+           "NdkMediaCodecFuzzerBase.cpp",
+          ],
+    header_libs: [
+           "libnativewindow_headers",
+           "libutils_headers",
+          ],
+    defaults: ["libmediandk_fuzzer_defaults",],
+}
diff --git a/media/ndk/fuzzer/README.md b/media/ndk/fuzzer/README.md
index 0fd08b0..7f6bdd7 100644
--- a/media/ndk/fuzzer/README.md
+++ b/media/ndk/fuzzer/README.md
@@ -8,6 +8,7 @@
 + [ndk_drm_fuzzer](#NdkDrm)
 + [ndk_mediamuxer_fuzzer](#NdkMediaMuxer)
 + [ndk_sync_codec_fuzzer](#NdkSyncCodec)
++ [ndk_async_codec_fuzzer](#NdkAsyncCodec)
 
 # <a name="NdkCrypto"></a> Fuzzer for NdkCrypto
 
@@ -156,3 +157,16 @@
   $ adb sync data
   $ adb shell /data/fuzz/arm64/ndk_sync_codec_fuzzer/ndk_sync_codec_fuzzer
 ```
+
+# <a name="NdkAsyncCodec"></a>Fuzzer for NdkAsyncCodec
+
+#### Steps to run
+1. Build the fuzzer
+```
+  $ mm -j$(nproc) ndk_async_codec_fuzzer
+```
+2. Run on device
+```
+  $ adb sync data
+  $ adb shell /data/fuzz/arm64/ndk_async_codec_fuzzer/ndk_async_codec_fuzzer
+```
diff --git a/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
new file mode 100644
index 0000000..28a38fe
--- /dev/null
+++ b/media/ndk/fuzzer/ndk_async_codec_fuzzer.cpp
@@ -0,0 +1,441 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <NdkMediaCodecFuzzerBase.h>
+#include <media/NdkMediaFormatPriv.h>
+#include <mutex>
+#include <queue>
+#include <thread>
+
+using namespace android;
+using namespace std;
+
+constexpr int32_t kMaxCryptoInfoAPIs = 3;
+constexpr int32_t kMaxNdkCodecAPIs = 5;
+
+template <typename T>
+class CallBackQueue {
+  public:
+    void push(T elem) {
+        bool needsNotify = false;
+        {
+            unique_lock<mutex> lock(mMutex);
+            needsNotify = mQueue.empty();
+            mQueue.push(std::move(elem));
+        }
+        if (needsNotify) {
+            mQueueNotEmptyCondition.notify_one();
+        }
+    }
+
+    T pop() {
+        unique_lock<mutex> lock(mMutex);
+        if (mQueue.empty()) {
+            mQueueNotEmptyCondition.wait(lock, [this]() { return !mQueue.empty(); });
+        }
+        auto result = mQueue.front();
+        mQueue.pop();
+        return result;
+    }
+
+  private:
+    mutex mMutex;
+    std::queue<T> mQueue;
+    std::condition_variable mQueueNotEmptyCondition;
+};
+
+class CallBackHandle {
+  public:
+    CallBackHandle() : mSawError(false), mIsDone(false) {}
+
+    virtual ~CallBackHandle() {}
+
+    void ioThread();
+
+    // Implementation in child class (Decoder/Encoder)
+    virtual void invokeInputBufferAPI(AMediaCodec* codec, int32_t index) {
+        (void)codec;
+        (void)index;
+    }
+    virtual void onFormatChanged(AMediaCodec* codec, AMediaFormat* format) {
+        (void)codec;
+        (void)format;
+    }
+    virtual void receiveError(void) {}
+    virtual void invokeOutputBufferAPI(AMediaCodec* codec, int32_t index,
+                                       AMediaCodecBufferInfo* bufferInfo) {
+        (void)codec;
+        (void)index;
+        (void)bufferInfo;
+    }
+
+    // Keep a queue of all function callbacks.
+    typedef function<void()> IOTask;
+    CallBackQueue<IOTask> mIOQueue;
+    bool mSawError;
+    bool mIsDone;
+};
+
+void CallBackHandle::ioThread() {
+    while (!mIsDone && !mSawError) {
+        auto task = mIOQueue.pop();
+        task();
+    }
+}
+
+static void onAsyncInputAvailable(AMediaCodec* codec, void* userdata, int32_t index) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mIOQueue.push([self, codec, index]() { self->invokeInputBufferAPI(codec, index); });
+}
+
+static void onAsyncOutputAvailable(AMediaCodec* codec, void* userdata, int32_t index,
+                                   AMediaCodecBufferInfo* bufferInfo) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    AMediaCodecBufferInfo bufferInfoCopy = *bufferInfo;
+    self->mIOQueue.push([self, codec, index, bufferInfoCopy]() {
+        AMediaCodecBufferInfo bc = bufferInfoCopy;
+        self->invokeOutputBufferAPI(codec, index, &bc);
+    });
+}
+
+static void onAsyncFormatChanged(AMediaCodec* codec, void* userdata, AMediaFormat* format) {
+    (void)codec;
+    (void)userdata;
+    (void)format;
+};
+
+static void onAsyncError(AMediaCodec* codec, void* userdata, media_status_t err, int32_t actionCode,
+                         const char* detail) {
+    CallBackHandle* self = (CallBackHandle*)userdata;
+    self->mSawError = true;
+    self->receiveError();
+    (void)codec;
+    (void)err;
+    (void)actionCode;
+    (void)detail;
+};
+
+class NdkAsyncCodecFuzzer : public NdkMediaCodecFuzzerBase, public CallBackHandle {
+  public:
+    NdkAsyncCodecFuzzer(const uint8_t* data, size_t size)
+        : NdkMediaCodecFuzzerBase(), mFdp(data, size) {
+        setFdp(&mFdp);
+        mStopCodec = false;
+        mSawInputEOS = false;
+        mSignalledError = false;
+        mIsEncoder = false;
+        mNumOfFrames = 0;
+        mNumInputFrames = 0;
+    };
+    ~NdkAsyncCodecFuzzer() {
+        mIOThreadPool->stop();
+        delete (mIOThreadPool);
+    };
+
+    void process();
+
+    static void codecOnFrameRendered(AMediaCodec* codec, void* userdata, int64_t mediaTimeUs,
+                                     int64_t systemNano) {
+        (void)codec;
+        (void)userdata;
+        (void)mediaTimeUs;
+        (void)systemNano;
+    };
+    class ThreadPool {
+      public:
+        void start();
+        void queueJob(const std::function<void()>& job);
+        void stop();
+
+      private:
+        void ThreadLoop();
+        bool mShouldTerminate = false;
+        std::vector<std::thread> mThreads;
+        std::mutex mQueueMutex;
+        std::condition_variable mQueueMutexCondition;
+        std::queue<std::function<void()>> mJobs;
+    };
+
+  private:
+    FuzzedDataProvider mFdp;
+    AMediaCodec* mCodec = nullptr;
+    void invokeCodecCryptoInfoAPI();
+    void invokeAsyncCodecAPIs(bool isEncoder);
+    void invokeAsyncCodeConfigAPI();
+    void invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex);
+    void invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                               AMediaCodecBufferInfo* bufferInfo);
+    void invokeFormatAPI(AMediaCodec* codec);
+    void receiveError();
+    bool mStopCodec;
+    bool mSawInputEOS;
+    bool mSignalledError;
+    int32_t mNumOfFrames;
+    int32_t mNumInputFrames;
+    mutable Mutex mMutex;
+    bool mIsEncoder;
+    ThreadPool* mIOThreadPool = new ThreadPool();
+};
+
+void NdkAsyncCodecFuzzer::ThreadPool::start() {
+    const uint32_t numThreads = std::thread::hardware_concurrency();
+    mThreads.resize(numThreads);
+    for (uint32_t i = 0; i < numThreads; ++i) {
+        mThreads.at(i) = std::thread(&ThreadPool::ThreadLoop, this);
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::ThreadLoop() {
+    while (true) {
+        std::function<void()> job;
+        {
+            std::unique_lock<std::mutex> lock(mQueueMutex);
+            mQueueMutexCondition.wait(lock, [this] { return !mJobs.empty() || mShouldTerminate; });
+            if (mShouldTerminate) {
+                return;
+            }
+            job = mJobs.front();
+            mJobs.pop();
+        }
+        job();
+    }
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::queueJob(const std::function<void()>& job) {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mJobs.push(job);
+    }
+    mQueueMutexCondition.notify_one();
+}
+
+void NdkAsyncCodecFuzzer::ThreadPool::stop() {
+    {
+        std::unique_lock<std::mutex> lock(mQueueMutex);
+        mShouldTerminate = true;
+    }
+    mQueueMutexCondition.notify_all();
+    for (std::thread& active_thread : mThreads) {
+        active_thread.join();
+    }
+    mThreads.clear();
+}
+
+void NdkAsyncCodecFuzzer::receiveError(void) {
+    mSignalledError = true;
+}
+
+void NdkAsyncCodecFuzzer::invokeInputBufferAPI(AMediaCodec* codec, int32_t bufferIndex) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+    if (mStopCodec || bufferIndex < 0 || mSawInputEOS) {
+        return;
+    }
+
+    uint8_t* buffer = AMediaCodec_getInputBuffer(codec, bufferIndex, &bufferSize);
+    if (buffer) {
+        std::vector<uint8_t> bytesRead = mFdp.ConsumeBytes<uint8_t>(
+                std::min(mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes), bufferSize));
+        memcpy(buffer, bytesRead.data(), bytesRead.size());
+        bufferSize = bytesRead.size();
+    } else {
+        mSignalledError = true;
+        return;
+    }
+
+    uint32_t flag = 0;
+    if (!bufferSize || mNumInputFrames == mNumOfFrames) {
+        flag |= AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
+        mSawInputEOS = true;
+    }
+    AMediaCodec_queueInputBuffer(codec, bufferIndex, 0 /* offset */, bufferSize, 0 /* time */,
+                                 flag);
+    mNumInputFrames++;
+}
+
+void NdkAsyncCodecFuzzer::invokeOutputBufferAPI(AMediaCodec* codec, int32_t bufferIndex,
+                                                AMediaCodecBufferInfo* bufferInfo) {
+    size_t bufferSize = 0;
+    Mutex::Autolock autoLock(mMutex);
+
+    if (mSignalledError) {
+        CallBackHandle::mSawError = true;
+        return;
+    }
+
+    if (mStopCodec || bufferIndex < 0 || mIsDone) {
+        return;
+    }
+
+    if (!mIsEncoder) {
+        (void)AMediaCodec_getOutputBuffer(codec, bufferIndex, &bufferSize);
+    }
+    AMediaCodec_releaseOutputBuffer(codec, bufferIndex, mFdp.ConsumeBool());
+    mIsDone = (0 != (bufferInfo->flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM));
+}
+
+void NdkAsyncCodecFuzzer::invokeFormatAPI(AMediaCodec* codec) {
+    AMediaFormat* codecFormat = nullptr;
+    if (mFdp.ConsumeBool()) {
+        codecFormat = AMediaCodec_getInputFormat(codec);
+    } else {
+        codecFormat = AMediaCodec_getOutputFormat(codec);
+    }
+    if (codecFormat) {
+        AMediaFormat_delete(codecFormat);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodecAPIs(bool isEncoder) {
+    ANativeWindow* nativeWindow = nullptr;
+
+    if (mFdp.ConsumeBool()) {
+        AMediaCodec_createInputSurface(mCodec, &nativeWindow);
+    }
+
+    if (AMEDIA_OK == AMediaCodec_configure(mCodec, getCodecFormat(), nativeWindow,
+                                           nullptr /* crypto */,
+                                           (isEncoder ? AMEDIACODEC_CONFIGURE_FLAG_ENCODE : 0))) {
+        mNumOfFrames = mFdp.ConsumeIntegralInRange<size_t>(kMinIterations, kMaxIterations);
+        // Configure codecs to run in async mode.
+        AMediaCodecOnAsyncNotifyCallback callBack = {onAsyncInputAvailable, onAsyncOutputAvailable,
+                                                     onAsyncFormatChanged, onAsyncError};
+        AMediaCodec_setAsyncNotifyCallback(mCodec, callBack, this);
+        mIOThreadPool->queueJob([this] { CallBackHandle::ioThread(); });
+
+        AMediaCodec_start(mCodec);
+        sleep(5);
+        int32_t count = 0;
+        while (++count <= mNumOfFrames) {
+            int32_t ndkcodecAPI =
+                    mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxNdkCodecAPIs);
+            switch (ndkcodecAPI) {
+                case 0: {  // get input and output Format
+                    invokeFormatAPI(mCodec);
+                    break;
+                }
+                case 1: {
+                    AMediaCodec_signalEndOfInputStream(mCodec);
+                    mSawInputEOS = true;
+                    break;
+                }
+                case 2: {  // set parameters
+                    // Create a new parameter and set
+                    AMediaFormat* params = AMediaFormat_new();
+                    AMediaFormat_setInt32(
+                            params, "video-bitrate",
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinIntKeyValue, kMaxIntKeyValue));
+                    AMediaCodec_setParameters(mCodec, params);
+                    AMediaFormat_delete(params);
+                    break;
+                }
+                case 3: {  // flush codec
+                    AMediaCodec_flush(mCodec);
+                    if (mFdp.ConsumeBool()) {
+                        AMediaCodec_start(mCodec);
+                    }
+                    break;
+                }
+                case 4: {
+                    char* name = nullptr;
+                    AMediaCodec_getName(mCodec, &name);
+                    AMediaCodec_releaseName(mCodec, name);
+                    break;
+                }
+                case 5:
+                default: {
+                    std::vector<uint8_t> userData = mFdp.ConsumeBytes<uint8_t>(
+                            mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
+                    AMediaCodecOnFrameRendered callback = codecOnFrameRendered;
+                    AMediaCodec_setOnFrameRenderedCallback(mCodec, callback, userData.data());
+                    break;
+                }
+            }
+        }
+        {
+            Mutex::Autolock autoLock(mMutex);
+            mStopCodec = 1;
+            AMediaCodec_stop(mCodec);
+        }
+    }
+
+    if (nativeWindow) {
+        ANativeWindow_release(nativeWindow);
+    }
+}
+
+void NdkAsyncCodecFuzzer::invokeAsyncCodeConfigAPI() {
+    mIOThreadPool->start();
+
+    while (mFdp.remaining_bytes() > 0) {
+        mIsEncoder = mFdp.ConsumeBool();
+        mCodec = createCodec(mIsEncoder, mFdp.ConsumeBool() /* isCodecForClient */);
+        if (mCodec) {
+            invokeAsyncCodecAPIs(mIsEncoder);
+            AMediaCodec_delete(mCodec);
+        }
+    }
+    mIOThreadPool->stop();
+}
+
+void NdkAsyncCodecFuzzer::invokeCodecCryptoInfoAPI() {
+    while (mFdp.remaining_bytes() > 0) {
+        AMediaCodecCryptoInfo* cryptoInfo = getAMediaCodecCryptoInfo();
+        int32_t ndkCryptoInfoAPI =
+                mFdp.ConsumeIntegralInRange<size_t>(kMinAPICase, kMaxCryptoInfoAPIs);
+        switch (ndkCryptoInfoAPI) {
+            case 0: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getEncryptedBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 1: {
+                size_t sizes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getClearBytes(cryptoInfo, sizes);
+                break;
+            }
+            case 2: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getIV(cryptoInfo, bytes);
+                break;
+            }
+            case 3:
+            default: {
+                uint8_t bytes[kMaxCryptoKey];
+                AMediaCodecCryptoInfo_getKey(cryptoInfo, bytes);
+                break;
+            }
+        }
+        AMediaCodecCryptoInfo_delete(cryptoInfo);
+    }
+}
+
+void NdkAsyncCodecFuzzer::process() {
+    if (mFdp.ConsumeBool()) {
+        invokeCodecCryptoInfoAPI();
+    } else {
+        invokeAsyncCodeConfigAPI();
+    }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    NdkAsyncCodecFuzzer ndkAsyncCodecFuzzer(data, size);
+    ndkAsyncCodecFuzzer.process();
+    return 0;
+}
diff --git a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
index 2b22f0f..a759ae7 100644
--- a/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_crypto_fuzzer.cpp
@@ -20,10 +20,12 @@
 constexpr size_t kMaxString = 256;
 constexpr size_t kMinBytes = 0;
 constexpr size_t kMaxBytes = 1000;
+constexpr size_t kMaxRuns = 100;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaUUID uuid = {};
+    size_t apiCount = 0;
     int32_t maxLen = fdp.ConsumeIntegralInRange<size_t>(kMinBytes, (size_t)sizeof(AMediaUUID));
     for (size_t idx = 0; idx < maxLen; ++idx) {
         uuid[idx] = fdp.ConsumeIntegral<uint8_t>();
@@ -31,7 +33,14 @@
     std::vector<uint8_t> initData =
             fdp.ConsumeBytes<uint8_t>(fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
     AMediaCrypto* crypto = AMediaCrypto_new(uuid, initData.data(), initData.size());
-    while (fdp.remaining_bytes()) {
+    /*
+     * The AMediaCrypto_isCryptoSchemeSupported API doesn't consume any input bytes,
+     * so when PickValueInArray() selects it repeatedly, only one byte is consumed by 'fdp'.
+     * As a result, on larger inputs, AMediaCrypto_isCryptoSchemeSupported can run a large
+     * number of times, potentially causing a timeout crash.
+     * Therefore, to prevent this issue, the while loop is limited to kMaxRuns iterations.
+     */
+    while (fdp.remaining_bytes() && ++apiCount <= kMaxRuns) {
         auto invokeNdkCryptoFuzzer = fdp.PickValueInArray<const std::function<void()>>({
                 [&]() {
                     AMediaCrypto_requiresSecureDecoderComponent(
diff --git a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
index c19ea13..23e2eaf 100644
--- a/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
+++ b/media/ndk/fuzzer/ndk_mediaformat_fuzzer.cpp
@@ -18,6 +18,7 @@
 #include <fcntl.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/NdkMediaFormat.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <sys/mman.h>
 #include <unistd.h>
 #include <utils/Log.h>
@@ -176,11 +177,13 @@
 constexpr size_t kMaxBytes = 1000;
 constexpr size_t kMinChoice = 0;
 constexpr size_t kMaxChoice = 9;
+const size_t kMaxIteration = android::AMessage::maxAllowedEntries();
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     FuzzedDataProvider fdp(data, size);
     AMediaFormat* mediaFormat = AMediaFormat_new();
-    while (fdp.remaining_bytes()) {
+    std::vector<std::string> nameCollection;
+    while (fdp.remaining_bytes() && nameCollection.size() < kMaxIteration) {
         const char* name = nullptr;
         std::string nameString;
         if (fdp.ConsumeBool()) {
@@ -190,6 +193,11 @@
                             : fdp.ConsumeRandomLengthString(
                                       fdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes));
             name = nameString.c_str();
+            std::vector<std::string>::iterator it =
+                    find(nameCollection.begin(), nameCollection.end(), name);
+            if (it == nameCollection.end()) {
+                nameCollection.push_back(name);
+            }
         }
         switch (fdp.ConsumeIntegralInRange<int32_t>(kMinChoice, kMaxChoice)) {
             case 0: {
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index b6dcaae..48a0a82 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -540,9 +540,11 @@
  *
  * @return AMEDIA_OK if the method call succeeds.
  *         AMEDIA_ERROR_INVALID_PARAMETER if reader or handle are NULL.
- *         AMEDIA_ERROR_UNKNOWN if some other error is encountered.
+ *         AMEDIA_ERROR_UNKNOWN if some other error is encountered, or if
+ *         the device no longer ships the android.hidl.token service needed
+ *         to satisfy the request because this API is deprecated.
  */
-media_status_t AImageReader_getWindowNativeHandle(
+[[deprecated]] media_status_t AImageReader_getWindowNativeHandle(
     AImageReader *reader, /* out */native_handle_t **handle);
 #endif
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index 9e0d5e4..be8b2b4 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -322,7 +322,7 @@
             ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
             BufferInfo bufInfo;
             if (mNumInFramesProvided >= mNumInFramesRequired) {
-                Log.i(TAG, "Input frame limit reached");
+                Log.i(TAG, "Input frame limit reached provided: " + mNumInFramesProvided);
                 mIndex = mInputBufferInfo.size() - 1;
                 bufInfo = mInputBufferInfo.get(mIndex);
                 if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 84554d3..306aeee 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -21,6 +21,7 @@
 import androidx.annotation.NonNull;
 import java.nio.ByteBuffer;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 public class FrameReleaseQueue {
     private static final String TAG = "FrameReleaseQueue";
@@ -28,7 +29,7 @@
     private MediaCodec mCodec;
     private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
     private ReleaseThread mReleaseThread;
-    private boolean doFrameRelease = false;
+    private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
     private boolean mRender = false;
     private int mWaitTime = 40; // milliseconds per frame
     private int mWaitTimeCorrection = 0;
@@ -49,19 +50,19 @@
 
     private class ReleaseThread extends Thread {
         public void run() {
-            int nextReleaseTime = 0;
+            long nextReleaseTime = 0;
             int loopCount = 0;
-            while (doFrameRelease || mFrameInfoQueue.size() > 0) {
+            while (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
                 FrameInfo curFrameInfo = mFrameInfoQueue.peek();
                 if (curFrameInfo == null) {
                     nextReleaseTime += mWaitTime;
                 } else {
-                    if (curFrameInfo.displayTime == 0) {
+                    if (firstReleaseTime == -1 || curFrameInfo.displayTime <= 0) {
                         // first frame of loop
                         firstReleaseTime = getCurSysTime();
                         nextReleaseTime = firstReleaseTime + mWaitTime;
                         popAndRelease(curFrameInfo, true);
-                    } else if (!doFrameRelease && mFrameInfoQueue.size() == 1) {
+                    } else if (!doFrameRelease.get() && mFrameInfoQueue.size() == 1) {
                         // EOS
                         Log.i(TAG, "EOS");
                         popAndRelease(curFrameInfo, false);
@@ -83,12 +84,13 @@
                         }
                         if (curFrameInfo != null && curFrameInfo.displayTime > curMediaTime) {
                             if ((curFrameInfo.displayTime - curMediaTime) < THRESHOLD_TIME) {
+                                // release the frame now since we are already within the threshold
                                 popAndRelease(curFrameInfo, true);
                             }
                         }
                     }
                 }
-                int sleepTime = nextReleaseTime - getCurSysTime();
+                long sleepTime = nextReleaseTime - getCurSysTime();
                 if (sleepTime > 0) {
                     try {
                         mReleaseThread.sleep(sleepTime);
@@ -109,7 +111,7 @@
     public FrameReleaseQueue(boolean render, int frameRate) {
         this.mFrameInfoQueue = new LinkedBlockingQueue();
         this.mReleaseThread = new ReleaseThread();
-        this.doFrameRelease = true;
+        this.doFrameRelease.set(true);
         this.mRender = render;
         this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
         int waitTimeRemainder = 1000 % frameRate;
@@ -163,7 +165,7 @@
     }
 
     public void stopFrameRelease() {
-        doFrameRelease = false;
+        doFrameRelease.set(false);
         try {
             mReleaseThread.join();
             Log.i(TAG, "Joined frame release thread");
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 7abb0b6..07dac5e 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -179,3 +179,8 @@
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
 }
+
+cc_library_headers {
+    name: "mediautils_headers",
+    export_include_dirs: ["include", "."],
+}
diff --git a/media/utils/include/mediautils/SharedMemoryAllocator.h b/media/utils/include/mediautils/SharedMemoryAllocator.h
index 79621e2..4243b9c 100644
--- a/media/utils/include/mediautils/SharedMemoryAllocator.h
+++ b/media/utils/include/mediautils/SharedMemoryAllocator.h
@@ -138,7 +138,7 @@
 template <typename Allocator>
 class ScopedAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     explicit ScopedAllocator(const std::shared_ptr<Allocator>& allocator) : mAllocator(allocator) {}
 
@@ -218,7 +218,7 @@
 template <typename Allocator, typename Policy>
 class PolicyAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     PolicyAllocator(Allocator allocator, Policy policy)
         : mAllocator(allocator), mPolicy(std::move(policy)) {}
@@ -277,7 +277,7 @@
         std::string name;
         size_t allocation_number;
     };
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     SnoopingAllocator(Allocator allocator, std::string_view name)
         : mName(name), mAllocator(std::move(allocator)) {}
@@ -362,16 +362,19 @@
 template <class PrimaryAllocator, class SecondaryAllocator>
 class FallbackAllocator {
   public:
-    static_assert(PrimaryAllocator::alignment() == SecondaryAllocator::alignment());
     static_assert(shared_allocator_impl::has_owns<PrimaryAllocator>);
 
-    static constexpr size_t alignment() { return PrimaryAllocator::alignment(); }
+    static size_t alignment() { return PrimaryAllocator::alignment(); }
 
     FallbackAllocator(const PrimaryAllocator& primary, const SecondaryAllocator& secondary)
-        : mPrimary(primary), mSecondary(secondary) {}
+        : mPrimary(primary), mSecondary(secondary) {
+      verify_alignment();
+    }
 
     // Default construct primary and secondary allocator
-    FallbackAllocator() = default;
+    FallbackAllocator() {
+      verify_alignment();
+    }
 
     template <typename T>
     AllocationType allocate(T&& request) {
@@ -414,6 +417,10 @@
     }
 
   private:
+    void verify_alignment() {
+      LOG_ALWAYS_FATAL_IF(PrimaryAllocator::alignment() != SecondaryAllocator::alignment(),
+                          "PrimaryAllocator::alignment() != SecondaryAllocator::alignment()");
+    }
     [[no_unique_address]] PrimaryAllocator mPrimary;
     [[no_unique_address]] SecondaryAllocator mSecondary;
 };
@@ -423,7 +430,7 @@
 template <typename Allocator>
 class IndirectAllocator {
   public:
-    static constexpr size_t alignment() { return Allocator::alignment(); }
+    static size_t alignment() { return Allocator::alignment(); }
 
     explicit IndirectAllocator(const std::shared_ptr<Allocator>& allocator)
         : mAllocator(allocator) {}
@@ -449,7 +456,7 @@
 // a shared memory mapped anonymous file) as allocations.
 class MemoryHeapBaseAllocator {
   public:
-    static constexpr size_t alignment() { return 4096; /* PAGE_SIZE */ }
+    static size_t alignment() { return kPageSize; }
     static constexpr unsigned FLAGS = 0;  // default flags
 
     template <typename T>
@@ -475,5 +482,7 @@
         // allocated by us, the underlying IMemoryHeap was a MemoryHeapBase
         static_cast<MemoryHeapBase&>(*heap).dispose();
     }
+  private:
+    static inline const size_t kPageSize = getpagesize();
 };
 }  // namespace android::mediautils
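SharedMemoryAllocator.h above drops the hard-coded 4096 alignment in favour of the runtime page size from getpagesize(), so 16k- and 64k-page devices are covered too. A small sketch of rounding an allocation request up to that alignment; the helper name is an assumption.

    #include <unistd.h>
    #include <cstddef>

    // Round `bytes` up to a multiple of the runtime page size, as an allocator
    // built on getpagesize() needs to do on 4k/16k/64k-page devices.
    // Valid because the page size is always a power of two.
    static size_t roundUpToPageSize(size_t bytes) {
        static const size_t kPageSize = static_cast<size_t>(getpagesize());
        return (bytes + kPageSize - 1) & ~(kPageSize - 1);
    }
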
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index 0689083..3fdc6eb 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -200,7 +200,10 @@
     name: "timerthread_tests",
 
     defaults: ["libmediautils_tests_defaults"],
-
+    // TODO(b/270180838)
+    test_options: {
+        unit_test: false,
+    },
     srcs: [
         "TimerThread-test.cpp",
     ],
diff --git a/media/utils/tests/shared_memory_allocator_tests.cpp b/media/utils/tests/shared_memory_allocator_tests.cpp
index 11bc72a..c164cbd 100644
--- a/media/utils/tests/shared_memory_allocator_tests.cpp
+++ b/media/utils/tests/shared_memory_allocator_tests.cpp
@@ -19,21 +19,25 @@
 #include <gtest/gtest.h>
 #include <mediautils/SharedMemoryAllocator.h>
 #include <sys/stat.h>
+#include <unistd.h>
 #include <utils/Log.h>
 
 using namespace android;
 using namespace android::mediautils;
 
 namespace {
+const size_t kPageSize = getpagesize();
+constexpr size_t kMaxPageSize = 65536;  // arm64 supports 4k, 16k and 64k pages
+
 void validate_block(const AllocationType& block) {
     ASSERT_TRUE(block != nullptr);
-    memset(block->unsecurePointer(), 10, 4096);
+    memset(block->unsecurePointer(), 10, kPageSize);
     EXPECT_EQ(*(static_cast<char*>(block->unsecurePointer()) + 100), static_cast<char>(10));
 }
 
 template <size_t N = 0, bool FatalOwn = true>
 struct ValidateForwarding {
-    static constexpr size_t alignment() { return 1337; }
+    static size_t alignment() { return 1337; }
 
     bool owns(const AllocationType& allocation) const {
         if (allocation == owned) return true;
@@ -48,9 +52,9 @@
 
     static inline size_t deallocate_all_count = 0;
     static inline const AllocationType owned =
-            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
     static inline const AllocationType not_owned =
-            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{4096});
+            MemoryHeapBaseAllocator().allocate(BasicAllocRequest{kMaxPageSize});
     static inline const std::string dump_string = std::to_string(N) + "Test Dump Forwarding";
 };
 
@@ -64,17 +68,14 @@
         shared_allocator_impl::has_deallocate_all<SnoopingAllocator<MemoryHeapBaseAllocator>> ==
         true);
 static_assert(
-        shared_allocator_impl::has_owns<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
+        shared_allocator_impl::has_owns<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                                        SizePolicy<kMaxPageSize>>> == true);
 static_assert(
-        shared_allocator_impl::has_dump<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
-static_assert(
-        shared_allocator_impl::has_deallocate_all<
-                PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<4096>>> ==
-        true);
+        shared_allocator_impl::has_dump<PolicyAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
+                                                        SizePolicy<kMaxPageSize>>> == true);
+static_assert(shared_allocator_impl::has_deallocate_all<PolicyAllocator<
+                      SnoopingAllocator<MemoryHeapBaseAllocator>, SizePolicy<kMaxPageSize>>> ==
+              true);
 static_assert(shared_allocator_impl::has_owns<
                       FallbackAllocator<SnoopingAllocator<MemoryHeapBaseAllocator>,
                                         SnoopingAllocator<MemoryHeapBaseAllocator>>> == true);
@@ -93,7 +94,7 @@
     const auto memory = allocator.allocate(BasicAllocRequest{500});
     ASSERT_TRUE(memory != nullptr);
     const auto fd = dup(memory->getMemory()->getHeapID());
-    EXPECT_EQ(memory->size(), static_cast<unsigned>(4096));
+    EXPECT_EQ(memory->size(), static_cast<unsigned>(kPageSize));
     EXPECT_EQ(memory->size(), memory->getMemory()->getSize());
     validate_block(memory);
     allocator.deallocate(memory);
@@ -108,7 +109,7 @@
 }
 
 TEST(shared_memory_allocator_tests, mheapbase_allocator_independence) {
-    static_assert(MemoryHeapBaseAllocator::alignment() == 4096);
+    ASSERT_EQ(MemoryHeapBaseAllocator::alignment(), kPageSize);
     MemoryHeapBaseAllocator allocator;
     const auto first_memory = allocator.allocate(BasicAllocRequest{500});
     const auto second_memory = allocator.allocate(BasicAllocRequest{500});
@@ -120,8 +121,8 @@
 }
 
 TEST(shared_memory_allocator_tests, snooping_allocator) {
-    static_assert(SnoopingAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(SnoopingAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
 
     SnoopingAllocator<MemoryHeapBaseAllocator> allocator{"allocator"};
     const auto first_memory = allocator.allocate(NamedAllocRequest{{500}, "allocate_1"});
@@ -165,29 +166,29 @@
 // TODO generic policy test
 TEST(shared_memory_allocator_tests, size_policy_allocator_enforcement) {
     PolicyAllocator allocator{MemoryHeapBaseAllocator{},
-                              SizePolicy<4096 * 7, 4096 * 2, 4096 * 4>{}};
+                              SizePolicy<kMaxPageSize * 7, kMaxPageSize * 2, kMaxPageSize * 4>{}};
     // Violate max size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 5}) == nullptr);
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 5}) == nullptr);
     // Violate min alloc size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096}) == nullptr);
-    const auto first_memory = allocator.allocate(BasicAllocRequest{4096 * 4});
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize}) == nullptr);
+    const auto first_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 4});
     validate_block(first_memory);
     // Violate pool size
-    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{4096 * 4}) == nullptr);
-    const auto second_memory = allocator.allocate(BasicAllocRequest{4096 * 3});
+    EXPECT_TRUE(allocator.allocate(BasicAllocRequest{kMaxPageSize * 4}) == nullptr);
+    const auto second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 3});
     validate_block(second_memory);
     allocator.deallocate(second_memory);
     // Check pool size update after deallocation
-    const auto new_second_memory = allocator.allocate(BasicAllocRequest{4096 * 2});
+    const auto new_second_memory = allocator.allocate(BasicAllocRequest{kMaxPageSize * 2});
     validate_block(new_second_memory);
 }
 
 TEST(shared_memory_allocator_tests, indirect_allocator) {
-    static_assert(IndirectAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(IndirectAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
     const auto allocator_handle = std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>();
     IndirectAllocator allocator{allocator_handle};
-    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
     EXPECT_TRUE(allocator_handle->owns(memory));
     EXPECT_TRUE(allocator_handle->getAllocations().size() == 1);
     allocator.deallocate(memory);
@@ -199,35 +200,37 @@
     // Test appropriate forwarding of allocator, deallocate
     const auto primary_allocator =
             std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("allocator");
-    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<4096>{}};
-    const auto memory = allocator.allocate(NamedAllocRequest{{4096}, "allocation"});
+    PolicyAllocator allocator{IndirectAllocator(primary_allocator), SizePolicy<kMaxPageSize>{}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation"});
     EXPECT_TRUE(primary_allocator->owns(memory));
     const auto& allocations = primary_allocator->getAllocations();
     EXPECT_TRUE(allocations.size() == 1);
     allocator.deallocate(memory);
     EXPECT_TRUE(allocations.size() == 0);
-    const auto memory2 = allocator.allocate(NamedAllocRequest{{4096}, "allocation_2"});
+    const auto memory2 = allocator.allocate(NamedAllocRequest{{kPageSize}, "allocation_2"});
     EXPECT_TRUE(allocations.size() == 1);
     EXPECT_TRUE(primary_allocator->owns(memory2));
     allocator.deallocate(memory2);
     EXPECT_FALSE(primary_allocator->owns(memory2));
     EXPECT_TRUE(allocations.size() == 0);
     // Test appropriate forwarding of own, dump, alignment, deallocate_all
-    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<4096>{}};
+    PolicyAllocator allocator2{ValidateForwarding<0>{}, SizePolicy<kMaxPageSize>{}};
     EXPECT_TRUE(allocator2.owns(ValidateForwarding<0>::owned));
     EXPECT_FALSE(allocator2.owns(ValidateForwarding<0>::not_owned));
     EXPECT_TRUE(allocator2.dump().find(ValidateForwarding<0>::dump_string) != std::string::npos);
-    static_assert(decltype(allocator2)::alignment() == ValidateForwarding<0>::alignment());
+    ASSERT_EQ(decltype(allocator2)::alignment(), ValidateForwarding<0>::alignment());
     size_t prev = ValidateForwarding<0>::deallocate_all_count;
     allocator2.deallocate_all();
     EXPECT_EQ(ValidateForwarding<0>::deallocate_all_count, prev + 1);
 }
 
 TEST(shared_memory_allocator_tests, snooping_allocator_nullptr) {
-    SnoopingAllocator allocator{PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<4096 * 2>{}}};
-    const auto memory = allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    SnoopingAllocator allocator{
+            PolicyAllocator{MemoryHeapBaseAllocator{}, SizePolicy<kMaxPageSize * 2>{}}};
+    const auto memory = allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
     validate_block(memory);
-    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{5000}, "allocation_2"}) == nullptr);
+    ASSERT_TRUE(allocator.allocate(NamedAllocRequest{{kMaxPageSize * 3}, "allocation_2"}) ==
+                nullptr);
     const auto& allocations = allocator.getAllocations();
     EXPECT_EQ(allocations.size(), 1ul);
     for (const auto& [key, val] : allocations) {
@@ -240,23 +243,26 @@
 TEST(shared_memory_allocator_tests, fallback_allocator) {
     // Construct Fallback Allocator
     const auto primary_allocator = std::make_shared<
-            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>>>(
-            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<4096>>{}, "primary_allocator");
+            SnoopingAllocator<PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>>>(
+            PolicyAllocator<MemoryHeapBaseAllocator, SizePolicy<kMaxPageSize>>{},
+            "primary_allocator");
     const auto secondary_allocator =
             std::make_shared<SnoopingAllocator<MemoryHeapBaseAllocator>>("secondary_allocator");
 
     FallbackAllocator fallback_allocator{SnoopingAllocator{IndirectAllocator{primary_allocator}},
                                          SnoopingAllocator{IndirectAllocator{secondary_allocator}}};
-    static_assert(decltype(fallback_allocator)::alignment() == 4096);
+    ASSERT_EQ(decltype(fallback_allocator)::alignment(), kPageSize);
     // Basic Allocation Test
-    const auto memory = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_1"});
+    const auto memory =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_1"});
     validate_block(memory);
     // Correct allocator selected
     EXPECT_TRUE(fallback_allocator.owns(memory));
     EXPECT_TRUE(primary_allocator->owns(memory));
     EXPECT_FALSE(secondary_allocator->owns(memory));
     // Test fallback allocation
-    const auto memory2 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_2"});
+    const auto memory2 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_2"});
     validate_block(memory2);
     // Correct allocator selected
     EXPECT_TRUE(fallback_allocator.owns(memory2));
@@ -276,7 +282,8 @@
     EXPECT_TRUE(primary_allocator->getAllocations().size() == 0ul);
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
     // Appropriate fallback after deallocation
-    const auto memory3 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_3"});
+    const auto memory3 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_3"});
     EXPECT_TRUE(fallback_allocator.owns(memory3));
     EXPECT_TRUE(primary_allocator->owns(memory3));
     EXPECT_FALSE(secondary_allocator->owns(memory3));
@@ -285,7 +292,8 @@
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 1ul);
     fallback_allocator.deallocate(memory2);
     EXPECT_TRUE(secondary_allocator->getAllocations().size() == 0ul);
-    const auto memory4 = fallback_allocator.allocate(NamedAllocRequest{{3000}, "allocation_4"});
+    const auto memory4 =
+            fallback_allocator.allocate(NamedAllocRequest{{kMaxPageSize}, "allocation_4"});
     EXPECT_TRUE(fallback_allocator.owns(memory4));
     EXPECT_FALSE(primary_allocator->owns(memory4));
     EXPECT_TRUE(secondary_allocator->owns(memory4));
@@ -311,7 +319,7 @@
     EXPECT_FALSE(forward_test.owns(Alloc1::not_owned));
     EXPECT_FALSE(forward_test.owns(Alloc2::not_owned));
     // Test alignment forwarding
-    static_assert(FallbackAllocator<Alloc1, Alloc2>::alignment() == Alloc1::alignment());
+    ASSERT_EQ(decltype(forward_test)::alignment(), Alloc1::alignment());
     // Test deallocate_all forwarding
     size_t prev1 = Alloc1::deallocate_all_count;
     size_t prev2 = Alloc2::deallocate_all_count;
@@ -343,8 +351,8 @@
     }
     EXPECT_EQ(allocations.size(), 0ul);
     // Test forwarding
-    static_assert(ScopedAllocator<ValidateForwarding<0>>::alignment() ==
-                  ValidateForwarding<0>::alignment());
+    ASSERT_EQ(ScopedAllocator<ValidateForwarding<0>>::alignment(),
+              ValidateForwarding<0>::alignment());
     ScopedAllocator<ValidateForwarding<0>> forwarding{};
     EXPECT_EQ(forwarding.dump(), ValidateForwarding<0>::dump_string);
 }
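Because alignment() is no longer constexpr, the tests above switch from static_assert to runtime ASSERT_EQ/EXPECT_EQ against getpagesize(). A minimal gtest sketch of the same kind of check, with a hypothetical test name.

    #include <gtest/gtest.h>
    #include <unistd.h>

    // Runtime check replacing a former compile-time static_assert: the value is
    // only known once the process can ask the kernel for its page size.
    TEST(AlignmentTests, MatchesRuntimePageSize) {
        const size_t pageSize = static_cast<size_t>(getpagesize());
        EXPECT_EQ(pageSize % 4096u, 0u);  // 4k, 16k and 64k pages all satisfy this
    }
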
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 0cd6243..7663250 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -138,10 +138,57 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libaudioflinger_dependencies",
+
+    shared_libs: [
+        "audioflinger-aidl-cpp",
+        "audioclient-types-aidl-cpp",
+        "av-types-aidl-cpp",
+        "effect-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "libactivitymanager_aidl",
+        "libaudioflinger_datapath",
+        "libaudioflinger_fastpath",
+        "libaudioflinger_timing",
+        "libaudioflinger_utils",
+        "libaudiofoundation",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libaudioutils",
+        "libcutils",
+        "libutils",
+        "liblog",
+        "libbinder",
+        "libbinder_ndk",
+        "libaudioclient",
+        "libaudiomanager",
+        "libmediametrics",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpermission",
+        "libpowermanager",
+        "libmemunreachable",
+        "libmedia_helper",
+        "libshmemcompat",
+        "libsounddose",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libmedialogservice",
+        "libaudiospdif",
+    ],
+}
+
+
+cc_library {
     name: "libaudioflinger",
 
     defaults: [
+        "libaudioflinger_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "audioflinger_flags_defaults",
@@ -164,44 +211,6 @@
         "frameworks/av/services/medialog",
     ],
 
-    shared_libs: [
-        "audioflinger-aidl-cpp",
-        "audioclient-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "effect-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "libactivitymanager_aidl",
-        "libaudioflinger_datapath",
-        "libaudioflinger_fastpath",
-        "libaudioflinger_timing",
-        "libaudioflinger_utils",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudioprocessing",
-        "libaudiospdif",
-        "libaudioutils",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libbinder_ndk",
-        "libaudioclient",
-        "libaudiomanager",
-        "libmedialogservice",
-        "libmediametrics",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpermission",
-        "libpowermanager",
-        "libmemunreachable",
-        "libmedia_helper",
-        "libshmemcompat",
-        "libsounddose",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
-    ],
-
     static_libs: [
         "libcpustats",
         "libpermission",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index fdc1b97..761270b 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1096,7 +1096,7 @@
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback, isSpatialized,
-                                      isBitPerfect);
+                                      isBitPerfect, &output.afTrackFlags);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -2094,6 +2094,9 @@
         }
         if (removed) {
             removedEffects = purgeStaleEffects_l();
+            std::vector< sp<IAfEffectModule> > removedOrphanEffects = purgeOrphanEffectChains_l();
+            removedEffects.insert(removedEffects.end(), removedOrphanEffects.begin(),
+                    removedOrphanEffects.end());
         }
     }
     for (auto& effect : removedEffects) {
@@ -3550,6 +3553,42 @@
     return removedEffects;
 }
 
+std::vector< sp<IAfEffectModule> > AudioFlinger::purgeOrphanEffectChains_l()
+{
+    ALOGV("purging stale effects from orphan chains");
+    std::vector< sp<IAfEffectModule> > removedEffects;
+    for (size_t index = 0; index < mOrphanEffectChains.size(); index++) {
+        sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
+        audio_session_t session = mOrphanEffectChains.keyAt(index);
+        if (session == AUDIO_SESSION_OUTPUT_MIX || session == AUDIO_SESSION_DEVICE
+                || session == AUDIO_SESSION_OUTPUT_STAGE) {
+            continue;
+        }
+        size_t numSessionRefs = mAudioSessionRefs.size();
+        bool found = false;
+        for (size_t k = 0; k < numSessionRefs; k++) {
+            AudioSessionRef *ref = mAudioSessionRefs.itemAt(k);
+            if (ref->mSessionid == session) {
+                ALOGV(" session %d still exists for %d with %d refs", session, ref->mPid,
+                        ref->mCnt);
+                found = true;
+                break;
+            }
+        }
+        if (!found) {
+            for (size_t i = 0; i < chain->numberOfEffects(); i++) {
+                sp<IAfEffectModule> effect = chain->getEffectModule(i);
+                removedEffects.push_back(effect);
+            }
+        }
+    }
+    for (auto& effect : removedEffects) {
+        effect->unPin();
+        updateOrphanEffectChains_l(effect);
+    }
+    return removedEffects;
+}
+
 // dumpToThreadLog_l() must be called with AudioFlinger::mLock held
 void AudioFlinger::dumpToThreadLog_l(const sp<IAfThreadBase> &thread)
 {
@@ -4275,7 +4314,7 @@
 
     response->id = idOut;
     response->enabled = enabledOut != 0;
-    response->effect = handle->asIEffect();
+    response->effect = handle.get() ? handle->asIEffect() : nullptr;
     response->desc = VALUE_OR_RETURN_STATUS(
             legacy2aidl_effect_descriptor_t_EffectDescriptor(descOut));
 
@@ -4283,24 +4322,42 @@
     return lStatus;
 }
 
-status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcOutput,
-        audio_io_handle_t dstOutput)
+status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+        audio_io_handle_t dstIo)
+NO_THREAD_SAFETY_ANALYSIS
 {
-    ALOGV("%s() session %d, srcOutput %d, dstOutput %d",
-            __func__, sessionId, srcOutput, dstOutput);
+    ALOGV("%s() session %d, srcIo %d, dstIo %d", __func__, sessionId, srcIo, dstIo);
     Mutex::Autolock _l(mLock);
-    if (srcOutput == dstOutput) {
-        ALOGW("%s() same dst and src outputs %d", __func__, dstOutput);
+    if (srcIo == dstIo) {
+        ALOGW("%s() same dst and src outputs %d", __func__, dstIo);
         return NO_ERROR;
     }
-    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcOutput);
+    IAfRecordThread* const srcRecordThread = checkRecordThread_l(srcIo);
+    IAfRecordThread* const dstRecordThread = checkRecordThread_l(dstIo);
+    if (srcRecordThread != nullptr || dstRecordThread != nullptr) {
+        if (srcRecordThread != nullptr) {
+            srcRecordThread->mutex().lock();
+        }
+        if (dstRecordThread != nullptr) {
+            dstRecordThread->mutex().lock();
+        }
+        status_t ret = moveEffectChain_l(sessionId, srcRecordThread, dstRecordThread);
+        if (srcRecordThread != nullptr) {
+            srcRecordThread->mutex().unlock();
+        }
+        if (dstRecordThread != nullptr) {
+            dstRecordThread->mutex().unlock();
+        }
+        return ret;
+    }
+    IAfPlaybackThread* const srcThread = checkPlaybackThread_l(srcIo);
     if (srcThread == nullptr) {
-        ALOGW("%s() bad srcOutput %d", __func__, srcOutput);
+        ALOGW("%s() bad srcIo %d", __func__, srcIo);
         return BAD_VALUE;
     }
-    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstOutput);
+    IAfPlaybackThread* const dstThread = checkPlaybackThread_l(dstIo);
     if (dstThread == nullptr) {
-        ALOGW("%s() bad dstOutput %d", __func__, dstOutput);
+        ALOGW("%s() bad dstIo %d", __func__, dstIo);
         return BAD_VALUE;
     }
 
@@ -4437,6 +4494,49 @@
     return status;
 }
 
+
+// moveEffectChain_l must be called with the mutex of both srcThread (if not null) and
+// dstThread (if not null) held
+status_t AudioFlinger::moveEffectChain_l(audio_session_t sessionId,
+        IAfRecordThread* srcThread, IAfRecordThread* dstThread)
+NO_THREAD_SAFETY_ANALYSIS // requires srcThread and dstThread locks
+{
+    sp<IAfEffectChain> chain = nullptr;
+    if (srcThread != 0) {
+        const Vector<sp<IAfEffectChain>> effectChains = srcThread->getEffectChains_l();
+        for (size_t i = 0; i < effectChains.size(); i ++) {
+             if (effectChains[i]->sessionId() == sessionId) {
+                 chain = effectChains[i];
+                 break;
+             }
+        }
+        ALOGV_IF(effectChains.size() == 0, "%s: no effect chain on io=%d", __func__,
+                srcThread->id());
+        if (chain == nullptr) {
+            ALOGE("%s wrong session id %d", __func__, sessionId);
+            return BAD_VALUE;
+        }
+        ALOGV("%s: removing effect chain for session=%d io=%d", __func__, sessionId,
+                srcThread->id());
+        srcThread->removeEffectChain_l(chain);
+    } else {
+        chain = getOrphanEffectChain_l(sessionId);
+        if (chain == nullptr) {
+            ALOGE("%s: no orphan effect chain found for session=%d", __func__, sessionId);
+            return BAD_VALUE;
+        }
+    }
+    if (dstThread != 0) {
+        ALOGV("%s: adding effect chain for session=%d on io=%d", __func__, sessionId,
+                dstThread->id());
+        dstThread->addEffectChain_l(chain);
+        return NO_ERROR;
+    }
+    ALOGV("%s: parking to orphan effect chain for session=%d", __func__, sessionId);
+    putOrphanEffectChain_l(chain);
+    return NO_ERROR;
+}
+
 status_t AudioFlinger::moveAuxEffectToIo(int EffectId,
         const sp<IAfPlaybackThread>& dstThread, sp<IAfPlaybackThread>* srcThread)
 {
@@ -4552,6 +4652,11 @@
 bool AudioFlinger::updateOrphanEffectChains(const sp<IAfEffectModule>& effect)
 {
     Mutex::Autolock _l(mLock);
+    return updateOrphanEffectChains_l(effect);
+}
+
+bool AudioFlinger::updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect)
+{
     audio_session_t session = effect->sessionId();
     ssize_t index = mOrphanEffectChains.indexOfKey(session);
     ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index ef36d50..9aefb6b 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -505,6 +505,9 @@
     // used by IAfDeviceEffectManagerCallback, IAfPatchPanelCallback, IAfThreadCallback
     audio_unique_id_t nextUniqueId(audio_unique_id_use_t use) final;
 
+              status_t moveEffectChain_l(audio_session_t sessionId,
+            IAfRecordThread* srcThread, IAfRecordThread* dstThread);
+
               // return thread associated with primary hardware device, or NULL
               DeviceTypeSet primaryOutputDevice_l() const;
 
@@ -536,6 +539,9 @@
 
                 std::vector< sp<IAfEffectModule> > purgeStaleEffects_l();
 
+                std::vector< sp<IAfEffectModule> > purgeOrphanEffectChains_l();
+                bool updateOrphanEffectChains_l(const sp<IAfEffectModule>& effect);
+
                 void broadcastParametersToRecordThreads_l(const String8& keyValuePairs);
                 void forwardParametersToDownstreamPatches_l(
                         audio_io_handle_t upStream, const String8& keyValuePairs,
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index ebb2428..681ec5f 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -60,18 +60,37 @@
             __func__, handle, patch.mHalHandle,
             patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
     Mutex::Autolock _l(mLock);
-    for (auto& effect : mDeviceEffects) {
-        status_t status = effect.second->onCreatePatch(handle, patch);
-        ALOGV("%s Effect onCreatePatch status %d", __func__, status);
-        ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            const status_t status = effect->onCreatePatch(handle, patch);
+            ALOGV("%s Effect onCreatePatch status %d", __func__, status);
+            ALOGW_IF(status == BAD_VALUE, "%s onCreatePatch error %d", __func__, status);
+        }
     }
 }
 
 void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
     Mutex::Autolock _l(mLock);
-    for (auto& effect : mDeviceEffects) {
-        effect.second->onReleasePatch(handle);
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            effect->onReleasePatch(handle);
+        }
+    }
+}
+
+void DeviceEffectManager::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    ALOGV("%s oldhandle %d newHandle %d mHalHandle %d device sink %08x",
+            __func__, oldHandle, newHandle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+    Mutex::Autolock _l(mLock);
+    for (auto& effectProxies : mDeviceEffects) {
+        for (auto& effect : effectProxies.second) {
+            const status_t status = effect->onUpdatePatch(oldHandle, newHandle, patch);
+            ALOGV("%s Effect onUpdatePatch status %d", __func__, status);
+            ALOGW_IF(status != NO_ERROR, "%s onUpdatePatch error %d", __func__, status);
+        }
     }
 }
 
@@ -87,6 +106,7 @@
         bool probe,
         bool notifyFramesProcessed) {
     sp<IAfDeviceEffectProxy> effect;
+    std::vector<sp<IAfDeviceEffectProxy>> effectsForDevice = {};
     sp<IAfEffectHandle> handle;
     status_t lStatus;
 
@@ -100,12 +120,21 @@
         Mutex::Autolock _l(mLock);
         auto iter = mDeviceEffects.find(device);
         if (iter != mDeviceEffects.end()) {
-            effect = iter->second;
-        } else {
+            effectsForDevice = iter->second;
+            for (const auto& iterEffect : effectsForDevice) {
+                if (memcmp(&iterEffect->desc().uuid, &descriptor->uuid, sizeof(effect_uuid_t)) ==
+                    0) {
+                    effect = iterEffect;
+                    break;
+                }
+            }
+        }
+        if (effect == nullptr) {
             effect = IAfDeviceEffectProxy::create(device, mMyCallback,
                     descriptor,
                     mAfDeviceEffectManagerCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT),
                     notifyFramesProcessed);
+            effectsForDevice.push_back(effect);
         }
         // create effect handle and connect it to effect module
         handle = IAfEffectHandle::create(
@@ -119,7 +148,8 @@
                     lStatus = NO_ERROR;
                 }
                 if (lStatus == NO_ERROR || lStatus == ALREADY_EXISTS) {
-                    mDeviceEffects.emplace(device, effect);
+                    mDeviceEffects.erase(device);
+                    mDeviceEffects.emplace(device, effectsForDevice);
                 }
             }
         }
@@ -185,8 +215,10 @@
         String8 outStr;
         outStr.appendFormat("%*sEffect for device %s address %s:\n", 2, "",
                 ::android::toString(iter.first.mType).c_str(), iter.first.getAddress());
-        write(fd, outStr.string(), outStr.size());
-        iter.second->dump2(fd, 4);
+        for (const auto& effect : iter.second) {
+            write(fd, outStr.string(), outStr.size());
+            effect->dump2(fd, 4);
+        }
     }
 
     if (locked) {
@@ -197,7 +229,20 @@
 size_t DeviceEffectManager::removeEffect(const sp<IAfDeviceEffectProxy>& effect)
 {
     Mutex::Autolock _l(mLock);
-    mDeviceEffects.erase(effect->device());
+    const auto& iter = mDeviceEffects.find(effect->device());
+    if (iter != mDeviceEffects.end()) {
+        const auto& iterEffect = std::find_if(
+                iter->second.begin(), iter->second.end(), [&effect](const auto& effectProxy) {
+                    return memcmp(&effectProxy->desc().uuid, &effect->desc().uuid,
+                            sizeof(effect_uuid_t)) == 0;
+                });
+        if (iterEffect != iter->second.end()) {
+            iter->second.erase(iterEffect);
+            if (iter->second.empty()) {
+                mDeviceEffects.erase(effect->device());
+            }
+        }
+    }
     return mDeviceEffects.size();
 }
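mDeviceEffects above changes from a single proxy per device to a vector of proxies per device, matched by effect UUID on insert and erase. A simplified sketch of that lookup-or-append and erase-by-match pattern, with strings standing in for the device and UUID types.

    #include <algorithm>
    #include <map>
    #include <string>
    #include <vector>

    // Simplified model of the new mDeviceEffects layout: one device key maps to
    // several effect entries, matched by a per-effect identifier (UUID above).
    using EffectList = std::vector<std::string>;
    std::map<std::string, EffectList> gDeviceEffects;

    void addEffect(const std::string& device, const std::string& uuid) {
        EffectList& effects = gDeviceEffects[device];
        if (std::find(effects.begin(), effects.end(), uuid) == effects.end()) {
            effects.push_back(uuid);  // at most one entry per UUID per device
        }
    }

    void removeEffect(const std::string& device, const std::string& uuid) {
        auto it = gDeviceEffects.find(device);
        if (it == gDeviceEffects.end()) return;
        auto& effects = it->second;
        effects.erase(std::remove(effects.begin(), effects.end(), uuid), effects.end());
        if (effects.empty()) gDeviceEffects.erase(it);  // drop empty device entries
    }
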
 
diff --git a/services/audioflinger/DeviceEffectManager.h b/services/audioflinger/DeviceEffectManager.h
index 6111030..cb7fad1 100644
--- a/services/audioflinger/DeviceEffectManager.h
+++ b/services/audioflinger/DeviceEffectManager.h
@@ -73,6 +73,9 @@
     void onCreateAudioPatch(audio_patch_handle_t handle,
             const IAfPatchPanel::Patch& patch) final;
     void onReleaseAudioPatch(audio_patch_handle_t handle) final;
+    void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+                            audio_patch_handle_t newHandle,
+                            const IAfPatchPanel::Patch& patch) final;
 
 private:
     status_t checkEffectCompatibility(const effect_descriptor_t *desc);
@@ -80,7 +83,7 @@
     Mutex mLock;
     const sp<IAfDeviceEffectManagerCallback> mAfDeviceEffectManagerCallback;
     const sp<DeviceEffectManagerCallback> mMyCallback;
-    std::map<AudioDeviceTypeAddr, sp<IAfDeviceEffectProxy>> mDeviceEffects;
+    std::map<AudioDeviceTypeAddr, std::vector<sp<IAfDeviceEffectProxy>>> mDeviceEffects;
 };
 
 class DeviceEffectManagerCallback : public EffectCallbackInterface {
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 7590afd..c3e1fba 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -514,11 +514,12 @@
     if (!locked) {
         result.append("\t\tCould not lock Fx mutex:\n");
     }
-
-    result.append("\t\tSession State Registered Enabled Suspended:\n");
-    result.appendFormat("\t\t%05d   %03d   %s          %s       %s\n",
-            mSessionId, mState, mPolicyRegistered ? "y" : "n",
-            mPolicyEnabled ? "y" : "n", mSuspended ? "y" : "n");
+    bool isInternal = isInternal_l();
+    result.append("\t\tSession State Registered Internal Enabled Suspended:\n");
+    result.appendFormat("\t\t%05d   %03d   %s          %s        %s       %s\n",
+            mSessionId, mState, mPolicyRegistered ? "y" : "n", isInternal ? "y" : "n",
+            ((isInternal && isEnabled()) || (!isInternal && mPolicyEnabled)) ? "y" : "n",
+            mSuspended ? "y" : "n");
 
     result.append("\t\tDescriptor:\n");
     char uuidStr[64];
@@ -1008,8 +1009,9 @@
     // mConfig.outputCfg.buffer.frameCount cannot be zero.
     mMaxDisableWaitCnt = (uint32_t)std::max(
             (uint64_t)1, // mMaxDisableWaitCnt must be greater than zero.
-            (uint64_t)MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
-                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount));
+            (uint64_t)mConfig.outputCfg.buffer.frameCount == 0 ? 1
+                : (MAX_DISABLE_TIME_MS * mConfig.outputCfg.samplingRate
+                / ((uint64_t)1000 * mConfig.outputCfg.buffer.frameCount)));
 
 exit:
     // TODO: consider clearing mConfig on error.
@@ -3302,6 +3304,23 @@
     return status;
 }
 
+status_t DeviceEffectProxy::onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+        audio_patch_handle_t newPatchHandle,
+        const IAfPatchPanel::Patch& /* patch */) {
+    status_t status = NAME_NOT_FOUND;
+    ALOGV("%s", __func__);
+    Mutex::Autolock _l(mProxyLock);
+    if (mEffectHandles.find(oldPatchHandle) != mEffectHandles.end()) {
+        ALOGV("%s replacing effect from handle %d to handle %d", __func__, oldPatchHandle,
+                newPatchHandle);
+        sp<IAfEffectHandle> effect = mEffectHandles.at(oldPatchHandle);
+        mEffectHandles.erase(oldPatchHandle);
+        mEffectHandles.emplace(newPatchHandle, effect);
+        status = NO_ERROR;
+    }
+    return status;
+}
+
 status_t DeviceEffectProxy::onCreatePatch(
         audio_patch_handle_t patchHandle, const IAfPatchPanel::Patch& patch) {
     status_t status = NAME_NOT_FOUND;
@@ -3315,6 +3334,9 @@
     }
     if (status == NO_ERROR || status == ALREADY_EXISTS) {
         Mutex::Autolock _l(mProxyLock);
+        size_t erasedHandle = mEffectHandles.erase(patchHandle);
+        ALOGV("%s %s effecthandle %p for patch %d",
+                __func__, (erasedHandle == 0 ? "adding" : "replacing"), handle.get(), patchHandle);
         mEffectHandles.emplace(patchHandle, handle);
     }
     ALOGW_IF(status == BAD_VALUE,
@@ -3347,18 +3369,21 @@
 
     if (mDescriptor.flags & EFFECT_FLAG_HW_ACC_TUNNEL) {
         Mutex::Autolock _l(mProxyLock);
-        mDevicePort = *port;
-        mHalEffect = new EffectModule(mMyCallback,
+        if (mHalEffect != nullptr && mDevicePort.id == port->id) {
+            ALOGV("%s reusing HAL effect", __func__);
+        } else {
+            mDevicePort = *port;
+            mHalEffect = new EffectModule(mMyCallback,
                                       const_cast<effect_descriptor_t *>(&mDescriptor),
                                       mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
                                       false /* pinned */, port->id);
-        if (audio_is_input_device(mDevice.mType)) {
-            mHalEffect->setInputDevice(mDevice);
-        } else {
-            mHalEffect->setDevices({mDevice});
+            if (audio_is_input_device(mDevice.mType)) {
+                mHalEffect->setInputDevice(mDevice);
+            } else {
+                mHalEffect->setDevices({mDevice});
+            }
+            mHalEffect->configure();
         }
-        mHalEffect->configure();
-
         *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
                                    mNotifyFramesProcessed);
         status = (*handle)->initCheck();
@@ -3446,6 +3471,23 @@
     return mManagerCallback->removeEffectFromHal(&mDevicePort, effect);
 }
 
+status_t DeviceEffectProxy::command(
+        int32_t cmdCode, const std::vector<uint8_t>& cmdData, int32_t maxReplySize,
+        std::vector<uint8_t>* reply) {
+    Mutex::Autolock _l(mProxyLock);
+    status_t status = EffectBase::command(cmdCode, cmdData, maxReplySize, reply);
+    if (status == NO_ERROR) {
+        for (auto& handle : mEffectHandles) {
+            sp<IAfEffectBase> effect = handle.second->effect().promote();
+            if (effect != nullptr) {
+                status = effect->command(cmdCode, cmdData, maxReplySize, reply);
+            }
+        }
+    }
+    ALOGV("%s status %d", __func__, status);
+    return status;
+}
+
 bool DeviceEffectProxy::isOutput() const {
     if (mDevicePort.id != AUDIO_PORT_HANDLE_NONE) {
         return mDevicePort.role == AUDIO_PORT_ROLE_SINK;
@@ -3512,7 +3554,7 @@
     }
 
     if (locked) {
-        mLock.unlock();
+        mProxyLock.unlock();
     }
 }
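DeviceEffectProxy::onUpdatePatch above re-keys the existing effect handle from the old patch handle to the new one instead of recreating it. A generic sketch of that re-key operation on a std::map; the helper is an illustration, not part of the effects code.

    #include <map>

    // Move a map entry from oldKey to newKey, as onUpdatePatch does for its
    // patch-handle -> effect-handle map. Returns false if oldKey is absent.
    template <typename K, typename V>
    bool rekey(std::map<K, V>& m, const K& oldKey, const K& newKey) {
        auto it = m.find(oldKey);
        if (it == m.end()) return false;
        V value = it->second;
        m.erase(it);
        m[newKey] = value;  // overwrites any stale entry under newKey
        return true;
    }
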
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 82ad486..79b4e63 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -668,6 +668,9 @@
     status_t onCreatePatch(audio_patch_handle_t patchHandle,
             const IAfPatchPanel::Patch& patch) final;
 
+    status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
+           const IAfPatchPanel::Patch& patch) final;
+
     void onReleasePatch(audio_patch_handle_t patchHandle) final;
 
     size_t removeEffect(const sp<IAfEffectModule>& effect) final;
@@ -681,6 +684,11 @@
     audio_channel_mask_t channelMask() const final;
     uint32_t channelCount() const final;
 
+    status_t command(int32_t cmdCode,
+                     const std::vector<uint8_t>& cmdData,
+                     int32_t maxReplySize,
+                     std::vector<uint8_t>* reply) final;
+
     void dump2(int fd, int spaces) const final;
 
 private:
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index 7393448..c4f0de3 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -124,6 +124,11 @@
     virtual sp<IAfEffectModule> asEffectModule() = 0;
     virtual sp<IAfDeviceEffectProxy> asDeviceEffectProxy() = 0;
 
+    virtual status_t command(int32_t cmdCode,
+            const std::vector<uint8_t>& cmdData,
+            int32_t maxReplySize,
+            std::vector<uint8_t>* reply) = 0;
+
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 
 private:
@@ -132,11 +137,6 @@
     virtual void setSuspended(bool suspended) = 0;
     virtual bool suspended() const = 0;
 
-    virtual status_t command(int32_t cmdCode,
-            const std::vector<uint8_t>& cmdData,
-            int32_t maxReplySize,
-            std::vector<uint8_t>* reply) = 0;
-
     virtual ssize_t disconnectHandle(IAfEffectHandle *handle, bool unpinIfLast) = 0;
     virtual ssize_t removeHandle_l(IAfEffectHandle *handle) = 0;
     virtual IAfEffectHandle* controlHandle_l() = 0;
@@ -361,6 +361,9 @@
     virtual status_t onCreatePatch(
             audio_patch_handle_t patchHandle,
             const IAfPatchPanel::Patch& patch) = 0;
+    virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
+            audio_patch_handle_t newPatchHandle,
+            const IAfPatchPanel::Patch& patch) = 0;
     virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
 
     virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index be51d51..6ee1ec9 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -398,7 +398,8 @@
             audio_port_handle_t portId,
             const sp<media::IAudioTrackCallback>& callback,
             bool isSpatialized,
-            bool isBitPerfect) = 0;
+            bool isBitPerfect,
+            audio_output_flags_t* afTrackFlags) = 0;
 
     virtual status_t addTrack_l(const sp<IAfTrack>& track) = 0;
     virtual bool destroyTrack_l(const sp<IAfTrack>& track) = 0;
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 64a5843..0ebb3eb 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -235,7 +235,17 @@
 
     std::lock_guard _afl(mAfMelReporterCallback->mutex());
     std::lock_guard _l(mLock);
-    stopMelComputationForPatch_l(melPatch);
+    if (melPatch.csdActive) {
+        // only need to stop if patch was active
+        melPatch.csdActive = false;
+        stopMelComputationForPatch_l(melPatch);
+    }
+}
+
+void MelReporter::onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    onReleaseAudioPatch(oldHandle);
+    onCreateAudioPatch(newHandle, patch);
 }
 
 sp<media::ISoundDose> MelReporter::getSoundDoseInterface(
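MelReporter implements the new onUpdateAudioPatch purely in terms of the existing release and create callbacks. A tiny sketch of that composition pattern with a simplified listener interface; the names are assumptions.

    // Illustrative: implement "update" by composing the existing release/create
    // callbacks, as MelReporter::onUpdateAudioPatch does above.
    struct PatchListener {
        virtual ~PatchListener() = default;
        virtual void onCreate(int handle) = 0;
        virtual void onRelease(int handle) = 0;
        virtual void onUpdate(int oldHandle, int newHandle) {
            onRelease(oldHandle);  // tear down the old route
            onCreate(newHandle);   // then set up the new one
        }
    };
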
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 78c6c0c..e26397c 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -84,6 +84,9 @@
     void onCreateAudioPatch(audio_patch_handle_t handle,
         const IAfPatchPanel::Patch& patch) final;
     void onReleaseAudioPatch(audio_patch_handle_t handle) final;
+    void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+                            audio_patch_handle_t newHandle,
+                            const IAfPatchPanel::Patch& patch) final;
 
     /**
      * The new metadata can determine whether we should compute MEL for the given thread.
diff --git a/services/audioflinger/PatchCommandThread.cpp b/services/audioflinger/PatchCommandThread.cpp
index c3259f1..8d5eb9f 100644
--- a/services/audioflinger/PatchCommandThread.cpp
+++ b/services/audioflinger/PatchCommandThread.cpp
@@ -58,6 +58,16 @@
     releaseAudioPatchCommand(handle);
 }
 
+void PatchCommandThread::updateAudioPatch(audio_patch_handle_t oldHandle,
+        audio_patch_handle_t newHandle, const IAfPatchPanel::Patch& patch) {
+    ALOGV("%s handle %d mHalHandle %d num sinks %d device sink %08x",
+            __func__, oldHandle, patch.mHalHandle,
+            patch.mAudioPatch.num_sinks,
+            patch.mAudioPatch.num_sinks > 0 ? patch.mAudioPatch.sinks[0].ext.device.type : 0);
+
+    updateAudioPatchCommand(oldHandle, newHandle, patch);
+}
+
 bool PatchCommandThread::threadLoop()
 NO_THREAD_SAFETY_ANALYSIS  // bug in clang compiler.
 {
@@ -104,6 +114,21 @@
                     }
                 }
                     break;
+                case UPDATE_AUDIO_PATCH: {
+                    const auto data = (UpdateAudioPatchData*) command->mData.get();
+                    ALOGV("%s processing update audio patch old handle %d new handle %d",
+                          __func__,
+                          data->mOldHandle, data->mNewHandle);
+
+                    for (const auto& listener : listenersCopy) {
+                        auto spListener = listener.promote();
+                        if (spListener) {
+                            spListener->onUpdateAudioPatch(data->mOldHandle,
+                                    data->mNewHandle, data->mPatch);
+                        }
+                    }
+                }
+                    break;
                 default:
                     ALOGW("%s unknown command %d", __func__, command->mCommand);
                     break;
@@ -145,6 +170,16 @@
     sendCommand(command);
 }
 
+void PatchCommandThread::updateAudioPatchCommand(
+        audio_patch_handle_t oldHandle, audio_patch_handle_t newHandle,
+        const IAfPatchPanel::Patch& patch) {
+    sp<Command> command = sp<Command>::make(UPDATE_AUDIO_PATCH,
+                                           new UpdateAudioPatchData(oldHandle, newHandle, patch));
+    ALOGV("%s adding update patch old handle %d new handle %d mHalHandle %d.",
+            __func__, oldHandle, newHandle, patch.mHalHandle);
+    sendCommand(command);
+}
+
 void PatchCommandThread::exit() {
     ALOGV("%s", __func__);
     {
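PatchCommandThread queues an UpdateAudioPatchData command and dispatches it to listeners from its own thread, just like the existing create/release commands. A minimal sketch of that single-worker command-queue pattern, using std::function in place of the Command/CommandData classes.

    #include <condition_variable>
    #include <deque>
    #include <functional>
    #include <mutex>
    #include <thread>

    class CommandThread {
      public:
        CommandThread() : mWorker([this] { loop(); }) {}
        ~CommandThread() {
            { std::lock_guard<std::mutex> l(mMutex); mExit = true; }
            mCond.notify_one();
            mWorker.join();
        }
        // Callers enqueue work, e.g. "dispatch onUpdateAudioPatch(old, new, patch)".
        void post(std::function<void()> cmd) {
            { std::lock_guard<std::mutex> l(mMutex); mQueue.push_back(std::move(cmd)); }
            mCond.notify_one();
        }
      private:
        void loop() {
            for (;;) {
                std::function<void()> cmd;
                {
                    std::unique_lock<std::mutex> l(mMutex);
                    mCond.wait(l, [this] { return mExit || !mQueue.empty(); });
                    if (mExit && mQueue.empty()) return;
                    cmd = std::move(mQueue.front());
                    mQueue.pop_front();
                }
                cmd();  // run outside the lock, like threadLoop() above
            }
        }
        std::mutex mMutex;
        std::condition_variable mCond;
        std::deque<std::function<void()>> mQueue;
        bool mExit = false;
        std::thread mWorker;  // declared last so the other members are ready first
    };
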
diff --git a/services/audioflinger/PatchCommandThread.h b/services/audioflinger/PatchCommandThread.h
index a312fb7..66018d7 100644
--- a/services/audioflinger/PatchCommandThread.h
+++ b/services/audioflinger/PatchCommandThread.h
@@ -38,6 +38,7 @@
     enum {
         CREATE_AUDIO_PATCH,
         RELEASE_AUDIO_PATCH,
+        UPDATE_AUDIO_PATCH,
     };
 
     class PatchCommandListener : public virtual RefBase {
@@ -45,6 +46,9 @@
         virtual void onCreateAudioPatch(audio_patch_handle_t handle,
                                         const IAfPatchPanel::Patch& patch) = 0;
         virtual void onReleaseAudioPatch(audio_patch_handle_t handle) = 0;
+        virtual void onUpdateAudioPatch(audio_patch_handle_t oldHandle,
+                                        audio_patch_handle_t newHandle,
+                                        const IAfPatchPanel::Patch& patch) = 0;
     };
 
     PatchCommandThread() : Thread(false /* canCallJava */) {}
@@ -54,6 +58,9 @@
 
     void createAudioPatch(audio_patch_handle_t handle, const IAfPatchPanel::Patch& patch);
     void releaseAudioPatch(audio_patch_handle_t handle);
+    void updateAudioPatch(audio_patch_handle_t oldHandle,
+                          audio_patch_handle_t newHandle,
+                          const IAfPatchPanel::Patch& patch);
 
     // Thread virtuals
     void onFirstRef() override;
@@ -64,7 +71,9 @@
     void createAudioPatchCommand(audio_patch_handle_t handle,
             const IAfPatchPanel::Patch& patch);
     void releaseAudioPatchCommand(audio_patch_handle_t handle);
-
+    void updateAudioPatchCommand(audio_patch_handle_t oldHandle,
+                                 audio_patch_handle_t newHandle,
+                                 const IAfPatchPanel::Patch& patch);
 private:
     class CommandData;
 
@@ -98,6 +107,18 @@
         audio_patch_handle_t mHandle;
     };
 
+    class UpdateAudioPatchData : public CommandData {
+    public:
+        UpdateAudioPatchData(audio_patch_handle_t oldHandle,
+                             audio_patch_handle_t newHandle,
+                             const IAfPatchPanel::Patch& patch)
+            :   mOldHandle(oldHandle), mNewHandle(newHandle), mPatch(patch) {}
+
+        const audio_patch_handle_t mOldHandle;
+        const audio_patch_handle_t mNewHandle;
+        const IAfPatchPanel::Patch mPatch;
+    };
+
     void sendCommand(const sp<Command>& command);
 
     std::string mThreadName;
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index ec96de5..f11a530 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -134,7 +134,8 @@
     if (patch->num_sources > 2) {
         return INVALID_OPERATION;
     }
-
+    bool reuseExistingHalPatch = false;
+    audio_patch_handle_t oldhandle = AUDIO_PATCH_HANDLE_NONE;
     if (*handle != AUDIO_PATCH_HANDLE_NONE) {
         auto iter = mPatches.find(*handle);
         if (iter != mPatches.end()) {
@@ -152,6 +153,7 @@
             if (removedPatch.mHalHandle != AUDIO_PATCH_HANDLE_NONE) {
                 audio_module_handle_t hwModule = AUDIO_MODULE_HANDLE_NONE;
                 const struct audio_patch &oldPatch = removedPatch.mAudioPatch;
+                oldhandle = *handle;
                 if (oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
                         (patch->sources[0].type != AUDIO_PORT_TYPE_DEVICE ||
                                 oldPatch.sources[0].ext.device.hw_module !=
@@ -174,8 +176,12 @@
                     hwDevice->releaseAudioPatch(removedPatch.mHalHandle);
                 }
                 halHandle = removedPatch.mHalHandle;
+                // Prevent removing/adding the device effect when the mix / device did not change
+                // and the HAL patch has not been released.
+                // Note that no patch leaks at the HAL layer since halHandle is reused.
+                reuseExistingHalPatch = (hwDevice == 0) && patchesHaveSameRoute(*patch, oldPatch);
             }
-            erasePatch(*handle);
+            erasePatch(*handle, reuseExistingHalPatch);
         }
     }
 
@@ -436,7 +442,12 @@
         *handle = static_cast<audio_patch_handle_t>(
                 mAfPatchPanelCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_PATCH));
         newPatch.mHalHandle = halHandle;
-        mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(*handle, newPatch);
+        if (reuseExistingHalPatch) {
+            mAfPatchPanelCallback->getPatchCommandThread()->updateAudioPatch(
+                    oldhandle, *handle, newPatch);
+        } else {
+            mAfPatchPanelCallback->getPatchCommandThread()->createAudioPatch(*handle, newPatch);
+        }
         if (insertedModule != AUDIO_MODULE_HANDLE_NONE) {
             addSoftwarePatchToInsertedModules(insertedModule, *handle, &newPatch.mAudioPatch);
         }
@@ -792,10 +803,12 @@
     return status;
 }
 
-void PatchPanel::erasePatch(audio_patch_handle_t handle) {
+void PatchPanel::erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch) {
     mPatches.erase(handle);
     removeSoftwarePatchFromInsertedModules(handle);
-    mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+    if (!reuseExistingHalPatch) {
+        mAfPatchPanelCallback->getPatchCommandThread()->releaseAudioPatch(handle);
+    }
 }
 
 /* List connected audio ports and their attributes */
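A minimal standalone model of the PatchPanel side of this change (PanelLite, PatchLite and the integer handles below are illustrative stand-ins, not the real classes) shows the reuse decision and the skipped release notification:

```cpp
// Sketch only: when the route is unchanged and the HAL handle is kept, the old
// entry is erased without a release notification and listeners get a single
// update(old, new) instead of release + create.
#include <iostream>
#include <map>

using Handle = int;
struct PatchLite { int halHandle; };

struct PanelLite {
    std::map<Handle, PatchLite> patches;

    void erasePatch(Handle h, bool reuseExistingHalPatch) {
        patches.erase(h);
        if (!reuseExistingHalPatch) {
            std::cout << "releaseAudioPatch(" << h << ")\n";   // normal teardown path
        }
    }

    void replacePatch(Handle oldHandle, Handle newHandle, PatchLite p, bool sameRoute) {
        const bool reuse = sameRoute;   // the real code also requires the HAL patch to be kept
        erasePatch(oldHandle, reuse);
        patches[newHandle] = p;
        if (reuse) {
            std::cout << "updateAudioPatch(" << oldHandle << ", " << newHandle << ")\n";
        } else {
            std::cout << "createAudioPatch(" << newHandle << ")\n";
        }
    }
};

int main() {
    PanelLite panel;
    panel.patches[1] = PatchLite{42};
    panel.replacePatch(1, 2, PatchLite{42}, /*sameRoute=*/true);   // update only
    panel.replacePatch(2, 3, PatchLite{43}, /*sameRoute=*/false);  // release + create
    return 0;
}
```

Keeping listeners such as DeviceEffectManager attached across the handle change is the point: the device effect stays on the still-open mix instead of being removed and re-added.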
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index b8b7b79..a95c601 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -75,7 +75,38 @@
             audio_module_handle_t module, audio_patch_handle_t handle,
             const struct audio_patch *patch);
     void removeSoftwarePatchFromInsertedModules(audio_patch_handle_t handle);
-    void erasePatch(audio_patch_handle_t handle);
+    /**
+     * Erase the patch referred to by its handle.
+     * @param handle of the patch to be erased
+     * @param reuseExistingHalPatch if set, do not trigger the release callback of listeners;
+     * listeners will instead receive an onUpdateAudioPatch when the patch is recreated.
+     * This prevents, for example, DeviceEffectManager from spuriously removing / adding a
+     * device effect on an already opened input / output mix.
+     */
+    void erasePatch(audio_patch_handle_t handle, bool reuseExistingHalPatch = false);
+
+    /**
+     * Returns true if the old and new patches passed as arguments describe the same
+     * connections between the first sink and the first source
+     * @param oldPatch previous patch
+     * @param newPatch new patch
+     * @return true if the route is unchanged between the old and new patch, false otherwise
+     */
+    inline bool patchesHaveSameRoute(
+            const struct audio_patch &newPatch, const struct audio_patch &oldPatch) const {
+        return (newPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                oldPatch.sources[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                newPatch.sources[0].id == oldPatch.sources[0].id &&
+                newPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+                oldPatch.sinks[0].type == AUDIO_PORT_TYPE_MIX &&
+                newPatch.sinks[0].ext.mix.handle == oldPatch.sinks[0].ext.mix.handle) ||
+                (newPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                oldPatch.sinks[0].type == AUDIO_PORT_TYPE_DEVICE &&
+                newPatch.sinks[0].id == oldPatch.sinks[0].id &&
+                newPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+                oldPatch.sources[0].type == AUDIO_PORT_TYPE_MIX &&
+                newPatch.sources[0].ext.mix.handle == oldPatch.sources[0].ext.mix.handle);
+    }
 
     const sp<IAfPatchPanelCallback> mAfPatchPanelCallback;
     std::map<audio_patch_handle_t, Patch> mPatches;
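The comparison above only looks at the first source/sink pair. A self-contained sketch with simplified stand-in types (PatchLite and Port below, not the real audio_patch) illustrates the two symmetric cases it accepts:

```cpp
// Sketch of the "same route" test: the route is unchanged when the first
// device -> mix (capture) or mix -> device (playback) pair is identical.
#include <cassert>

enum PortType { DEVICE, MIX };
struct Port { PortType type; int id; int mixHandle; };
struct PatchLite { Port source; Port sink; };

static bool sameRoute(const PatchLite& n, const PatchLite& o) {
    const bool deviceToMix = n.source.type == DEVICE && o.source.type == DEVICE &&
                             n.source.id == o.source.id &&
                             n.sink.type == MIX && o.sink.type == MIX &&
                             n.sink.mixHandle == o.sink.mixHandle;
    const bool mixToDevice = n.sink.type == DEVICE && o.sink.type == DEVICE &&
                             n.sink.id == o.sink.id &&
                             n.source.type == MIX && o.source.type == MIX &&
                             n.source.mixHandle == o.source.mixHandle;
    return deviceToMix || mixToDevice;
}

int main() {
    const PatchLite oldPatch{{DEVICE, /*id=*/7, 0}, {MIX, 0, /*mixHandle=*/21}};
    PatchLite newPatch = oldPatch;          // same device into the same input mix
    assert(sameRoute(newPatch, oldPatch));  // HAL patch can be reused
    newPatch.sink.mixHandle = 22;           // different mix: full release + create
    assert(!sameRoute(newPatch, oldPatch));
    return 0;
}
```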
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 85ff7bd..a36494c 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2378,7 +2378,8 @@
         audio_port_handle_t portId,
         const sp<media::IAudioTrackCallback>& callback,
         bool isSpatialized,
-        bool isBitPerfect)
+        bool isBitPerfect,
+        audio_output_flags_t *afTrackFlags)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2696,6 +2697,7 @@
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
         }
+        *afTrackFlags = trackFlags;
 
         track = IAfTrack::create(this, client, streamType, attr, sampleRate, format,
                           channelMask, frameCount,
@@ -10734,14 +10736,24 @@
         AudioHwDevice *hwDev,  AudioStreamOut *output, bool systemReady)
     : MmapThread(afThreadCallback, id, hwDev, output->stream, systemReady, true /* isOut */),
       mStreamType(AUDIO_STREAM_MUSIC),
-      mStreamVolume(1.0),
-      mStreamMute(false),
       mOutput(output)
 {
     snprintf(mThreadName, kThreadNameLength, "AudioMmapOut_%X", id);
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
+
+    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+        mStreamTypes[stream].volume = 0.0f;
+        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    }
+    // Audio patch and call assistant volumes are always max
+    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
+
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -10798,8 +10810,8 @@
 void MmapPlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
 {
     Mutex::Autolock _l(mLock);
+    mStreamTypes[stream].volume = value;
     if (stream == mStreamType) {
-        mStreamVolume = value;
         broadcast_l();
     }
 }
@@ -10807,17 +10819,14 @@
 float MmapPlaybackThread::streamVolume(audio_stream_type_t stream) const
 {
     Mutex::Autolock _l(mLock);
-    if (stream == mStreamType) {
-        return mStreamVolume;
-    }
-    return 0.0f;
+    return mStreamTypes[stream].volume;
 }
 
 void MmapPlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
 {
     Mutex::Autolock _l(mLock);
+    mStreamTypes[stream].mute = muted;
     if (stream == mStreamType) {
-        mStreamMute= muted;
         broadcast_l();
     }
 }
@@ -10857,14 +10866,13 @@
 {
     float volume;
 
-    if (mMasterMute || mStreamMute) {
+    if (mMasterMute || streamMuted_l()) {
         volume = 0;
     } else {
-        volume = mMasterVolume * mStreamVolume;
+        volume = mMasterVolume * streamVolume_l();
     }
 
     if (volume != mHalVolFloat) {
-
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
 
@@ -10898,8 +10906,8 @@
             track->setMetadataHasChanged();
             track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
                 /*muteState=*/{mMasterMute,
-                               mStreamVolume == 0.f,
-                               mStreamMute,
+                               streamVolume_l() == 0.f,
+                               streamMuted_l(),
                                // TODO(b/241533526): adjust logic to include mute from AppOps
                                false /*muteFromPlaybackRestricted*/,
                                false /*muteFromClientVolume*/,
@@ -11012,7 +11020,7 @@
     MmapThread::dumpInternals_l(fd, args);
 
     dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, mStreamVolume, mHalVolFloat, mStreamMute);
+            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
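The hunks above replace the single per-thread stream volume/mute pair with a per-stream table while keeping the existing HAL volume conversion. A small standalone sketch of that computation (toQ8_24 is an illustrative helper, not an AudioFlinger function) is:

```cpp
// Sketch: combine master and stream volume, force 0 when either mute applies,
// then convert to the HAL's unsigned 8.24 fixed-point representation.
#include <cstdint>
#include <cstdio>

static uint32_t toQ8_24(float masterVolume, float streamVolume,
                        bool masterMute, bool streamMute) {
    const float v = (masterMute || streamMute) ? 0.0f : masterVolume * streamVolume;
    return static_cast<uint32_t>(v * (1 << 24));   // 1.0f -> 0x01000000
}

int main() {
    std::printf("0x%08x\n", static_cast<unsigned>(toQ8_24(1.0f, 0.5f, false, false))); // 0x00800000
    std::printf("0x%08x\n", static_cast<unsigned>(toQ8_24(1.0f, 1.0f, false, true)));  // muted -> 0
    return 0;
}
```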
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index f5aa6d5..79f75ca 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -935,7 +935,8 @@
                                 audio_port_handle_t portId,
                                 const sp<media::IAudioTrackCallback>& callback,
                                 bool isSpatialized,
-                                bool isBitPerfect) final;
+                                bool isBitPerfect,
+                                audio_output_flags_t* afTrackFlags) final;
 
     bool isTrackActive(const sp<IAfTrack>& track) const final {
         return mActiveTracks.indexOf(track) >= 0;
@@ -2220,12 +2221,17 @@
 
 protected:
     void dumpInternals_l(int fd, const Vector<String16>& args) final;
+                float       streamVolume_l() const {
+                    return mStreamTypes[mStreamType].volume;
+                }
+                bool     streamMuted_l() const {
+                    return mStreamTypes[mStreamType].mute;
+                }
 
+                stream_type_t               mStreamTypes[AUDIO_STREAM_CNT];
                 audio_stream_type_t         mStreamType;
                 float                       mMasterVolume;
-                float                       mStreamVolume;
                 bool                        mMasterMute;
-                bool                        mStreamMute;
                 AudioStreamOut*             mOutput;
 
                 mediautils::atomic_sp<audio_utils::MelProcessor> mMelProcessor;
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 97470a2..7accaf0 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -477,8 +477,7 @@
     switch (device) {
         case AUDIO_DEVICE_OUT_WIRED_HEADSET:
         case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-        // TODO(b/278265907): enable A2DP when we can distinguish A2DP headsets
-        // case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
+        case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP:
         case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
         case AUDIO_DEVICE_OUT_USB_HEADSET:
         case AUDIO_DEVICE_OUT_BLE_HEADSET:
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 1e57edd..13b70e5 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -328,6 +328,13 @@
     wp<AudioPolicyMix> mPolicyMix;  // non NULL when used by a dynamic policy
 
     virtual uint32_t getRecommendedMuteDurationMs() const { return 0; }
+    virtual std::string info() const {
+        std::string result;
+        result.append("[portId:" );
+        result.append(android::internal::ToString(getId()));
+        result.append("]");
+        return result;
+    }
 
 protected:
     const sp<PolicyAudioPort> mPolicyAudioPort;
@@ -471,6 +478,8 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    virtual std::string info() const override;
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index 59eee52..d40bbcb 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -25,6 +25,10 @@
 
 namespace android {
 
+class AudioInputCollection;
+class AudioInputDescriptor;
+class AudioPolicyClientInterface;
+
 class EffectDescriptor : public RefBase
 {
 public:
@@ -40,6 +44,8 @@
 
     int mId;                   // effect unique ID
     audio_io_handle_t mIo;     // io the effect is attached to
+    bool mIsOrphan = false;    // on creation, the effect is not yet attached to an io handle, but not yet orphan either
+    bool mEnabledWhenMoved = false;    // Backup enabled state before being moved
     audio_session_t mSession;  // audio session the effect is on
     effect_descriptor_t mDesc; // effect descriptor
     bool mEnabled;             // enabled state: CPU load being used or not
@@ -69,12 +75,28 @@
 
     void moveEffects(audio_session_t session,
                      audio_io_handle_t srcOutput,
-                     audio_io_handle_t dstOutput);
+                     audio_io_handle_t dstOutput,
+                     AudioPolicyClientInterface *clientInterface);
     void moveEffects(const std::vector<int>& ids, audio_io_handle_t dstOutput);
+    void moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo, audio_io_handle_t dstIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
+    void moveEffectsForIo(audio_session_t sessionId, audio_io_handle_t dstIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *mClientInterface);
+    void putOrphanEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+            const AudioInputCollection *inputs, AudioPolicyClientInterface *clientInterface);
 
+    /**
+     * @brief Checks whether an effect session is already attached to an io handle and returns
+     * that handle if found. The check is restricted to a given effect type when effectType is
+     * not null, and considers any effect otherwise.
+     * @param sessionId to consider.
+     * @param effectType to consider.
+     * @return the io handle if found, AUDIO_IO_HANDLE_NONE otherwise.
+     */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
                                       const effect_uuid_t *effectType = nullptr);
-
+    bool hasOrphansForSession(audio_session_t sessionId);
+    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
     void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
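A minimal sketch of the orphan bookkeeping these new members and queries enable (EffectLite is an illustrative stand-in; -1 stands for AUDIO_IO_HANDLE_NONE) might be:

```cpp
// Sketch: an effect whose io is "none" is parked as orphan and can be found
// again by its audio session when a suitable input reappears.
#include <cassert>
#include <vector>

struct EffectLite {
    int session;
    int io;                       // -1 stands in for AUDIO_IO_HANDLE_NONE
    bool isOrphan = false;        // mirrors mIsOrphan
    bool enabledWhenMoved = false;
};

static bool hasOrphansForSession(const std::vector<EffectLite>& effects, int session) {
    for (const auto& e : effects) {
        if (e.session == session && e.isOrphan) return true;
    }
    return false;
}

int main() {
    std::vector<EffectLite> effects{{/*session=*/101, /*io=*/13}};
    assert(!hasOrphansForSession(effects, 101));
    effects[0].io = -1;           // its input was released: park the effect
    effects[0].isOrphan = true;
    assert(hasOrphansForSession(effects, 101));
    return 0;
}
```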
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 2f424b8..8bd7575 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -987,6 +987,18 @@
     return clientsForStream;
 }
 
+std::string SwAudioOutputDescriptor::info() const {
+    std::string result;
+    result.append("[" );
+    result.append(AudioOutputDescriptor::info());
+    result.append("[io:" );
+    result.append(android::internal::ToString(mIoHandle));
+    result.append(", " );
+    result.append(isDuplicated() ? "duplicating" : mProfile->getTagName());
+    result.append("]]");
+    return result;
+}
+
 void SwAudioOutputCollection::dump(String8 *dst) const
 {
     dst->appendFormat("\n Outputs (%zu):\n", size());
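For reference, a standalone approximation of the strings these info() helpers produce (plain std::to_string in place of android::internal::ToString, made-up values) could be:

```cpp
// Sketch of the log tag format: the base class reports the port id, and the
// software output adds its io handle and profile tag (or "duplicating").
#include <iostream>
#include <string>

static std::string baseInfo(int portId) {
    return "[portId:" + std::to_string(portId) + "]";
}

static std::string swOutputInfo(int portId, int ioHandle,
                                const std::string& profileTag, bool duplicated) {
    return "[" + baseInfo(portId) + "[io:" + std::to_string(ioHandle) + ", " +
           (duplicated ? std::string("duplicating") : profileTag) + "]]";
}

int main() {
    // Prints something like "[[portId:17][io:13, primary output]]", the kind of
    // prefix now attached to the setOutputDevices() logs further below.
    std::cout << swOutputInfo(17, 13, "primary output", false) << std::endl;
    return 0;
}
```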
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index bc2ba31..0f9cc89 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -289,12 +289,20 @@
             continue; // skip the mix
         }
 
-        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) && is_mix_loopback(policyMix->mRouteFlags)) {
-            // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
-            // using dynamic audio policy.
-            ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic audio policy mix.",
-                __func__);
-            return INVALID_OPERATION;
+        if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+            if (is_mix_loopback(policyMix->mRouteFlags)) {
+                // AAudio MMAP_NOIRQ streams cannot be routed to loopback/loopback+render
+                // using dynamic audio policy.
+                ALOGD("%s: Rejecting MMAP_NOIRQ request matched to loopback dynamic "
+                      "audio policy mix.", __func__);
+                return INVALID_OPERATION;
+            }
+            if (mixDevice != nullptr && !mixDevice->isMmap()) {
+                ALOGD("%s: Rejecting MMAP_NOIRQ request matched to dynamic audio policy "
+                      "mix pointing to device %s which doesn't support mmap", __func__,
+                      mixDevice->toString(false).c_str());
+                return INVALID_OPERATION;
+            }
         }
 
         if (mixDevice != nullptr && mixDevice->equals(requestedDevice)) {
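A standalone sketch of the two MMAP_NOIRQ guards (MixLite and DeviceLite are simplified stand-ins; the real code checks policyMix->mRouteFlags and DeviceDescriptor::isMmap()):

```cpp
// Sketch: reject loopback (and loopback+render) mixes for MMAP_NOIRQ requests,
// and reject mixes whose target device cannot do mmap; other requests pass.
#include <cassert>

struct MixLite    { bool loopback; };
struct DeviceLite { bool supportsMmap; };

enum Result { OK, INVALID_OPERATION };

static Result checkMmapNoIrqMix(bool mmapNoIrqRequest, const MixLite& mix,
                                const DeviceLite* mixDevice) {
    if (!mmapNoIrqRequest) return OK;
    if (mix.loopback) return INVALID_OPERATION;                 // no MMAP to loopback policies
    if (mixDevice != nullptr && !mixDevice->supportsMmap) return INVALID_OPERATION;
    return OK;
}

int main() {
    const DeviceLite usbNoMmap{/*supportsMmap=*/false};
    assert(checkMmapNoIrqMix(true,  {/*loopback=*/true},  nullptr)    == INVALID_OPERATION);
    assert(checkMmapNoIrqMix(true,  {/*loopback=*/false}, &usbNoMmap) == INVALID_OPERATION);
    assert(checkMmapNoIrqMix(false, {/*loopback=*/true},  &usbNoMmap) == OK);  // non-MMAP unaffected
    return 0;
}
```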
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 3f9c8b0..1f6946c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -18,9 +18,15 @@
 //#define LOG_NDEBUG 0
 
 #include <android-base/stringprintf.h>
+
+#include "AudioInputDescriptor.h"
 #include "EffectDescriptor.h"
 #include <utils/String8.h>
 
+#include <AudioPolicyInterface.h>
+#include "AudioPolicyMix.h"
+#include "HwModule.h"
+
 namespace android {
 
 void EffectDescriptor::dump(String8 *dst, int spaces) const
@@ -175,30 +181,57 @@
     return MAX_EFFECTS_MEMORY;
 }
 
-void EffectDescriptorCollection::moveEffects(audio_session_t session,
-                                             audio_io_handle_t srcOutput,
-                                             audio_io_handle_t dstOutput)
+void EffectDescriptorCollection::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
+                                             audio_io_handle_t dstIo,
+                                             AudioPolicyClientInterface *clientInterface)
 {
-    ALOGV("%s session %d srcOutput %d dstOutput %d", __func__, session, srcOutput, dstOutput);
+    ALOGV("%s session %d srcIo %d dstIo %d", __func__, sessionId, srcIo, dstIo);
     for (size_t i = 0; i < size(); i++) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mSession == session && effect->mIo == srcOutput) {
-            effect->mIo = dstOutput;
+        if (effect->mSession == sessionId && effect->mIo == srcIo) {
+            effect->mIo = dstIo;
+            // Moving to AUDIO_IO_HANDLE_NONE parks the effect as orphan
+            effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
+        }
+    }
+    clientInterface->moveEffects(sessionId, srcIo, dstIo);
+}
+
+void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids, audio_io_handle_t dstIo)
+{
+    ALOGV("%s num effects %zu, first ID %d, dstIo %d",
+        __func__, ids.size(), ids.size() ? ids[0] : 0, dstIo);
+    for (size_t i = 0; i < size(); i++) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
+            effect->mIo = dstIo;
+            effect->mIsOrphan = (dstIo == AUDIO_IO_HANDLE_NONE);
         }
     }
 }
 
-void EffectDescriptorCollection::moveEffects(const std::vector<int>& ids,
-                                             audio_io_handle_t dstOutput)
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId)
 {
-    ALOGV("%s num effects %zu, first ID %d, dstOutput %d",
-        __func__, ids.size(), ids.size() ? ids[0] : 0, dstOutput);
-    for (size_t i = 0; i < size(); i++) {
+    for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (std::find(begin(ids), end(ids), effect->mId) != end(ids)) {
-            effect->mIo = dstOutput;
+        if (effect->mSession == sessionId && effect->mIsOrphan) {
+            return true;
         }
     }
+    return false;
+}
+
+EffectDescriptorCollection EffectDescriptorCollection::getOrphanEffectsForSession(
+        audio_session_t sessionId) const
+{
+    EffectDescriptorCollection effects;
+    for (size_t i = 0; i < size(); i++) {
+        sp<EffectDescriptor> effect = valueAt(i);
+        if (effect->mSession == sessionId && effect->mIsOrphan) {
+            effects.add(keyAt(i), effect);
+        }
+    }
+    return effects;
 }
 
 audio_io_handle_t EffectDescriptorCollection::getIoForSession(audio_session_t sessionId,
@@ -214,6 +247,73 @@
     return AUDIO_IO_HANDLE_NONE;
 }
 
+void EffectDescriptorCollection::moveEffectsForIo(audio_session_t session,
+        audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    // No src io: try to find from effect session the src Io to move from
+    audio_io_handle_t srcIo = getIoForSession(session);
+    if (hasOrphansForSession(session) || (srcIo != AUDIO_IO_HANDLE_NONE && srcIo != dstIo)) {
+        moveEffects(session, srcIo, dstIo, inputs, clientInterface);
+    }
+}
+
+void EffectDescriptorCollection::moveEffects(audio_session_t session,
+        audio_io_handle_t srcIo, audio_io_handle_t dstIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    if ((srcIo != AUDIO_IO_HANDLE_NONE && srcIo == dstIo)
+            || (srcIo == AUDIO_IO_HANDLE_NONE && !hasOrphansForSession(session))) {
+        return;
+    }
+    // Either we may find orphan effects for the given session, or effects for this session may
+    // already have been assigned to another input (this can happen when an input is released or
+    // recreated after the client sets its preferred device).
+    EffectDescriptorCollection effectsToMove;
+    if (srcIo == AUDIO_IO_HANDLE_NONE) {
+        ALOGV("%s: restoring effects for session %d from orphan park to io=%d", __func__,
+                session, dstIo);
+        effectsToMove = getOrphanEffectsForSession(session);
+    } else {
+        ALOGV("%s: moving effects for session %d from io=%d to io=%d", __func__, session, srcIo,
+              dstIo);
+        if (const sp<AudioInputDescriptor>& previousInputDesc = inputs->valueFor(srcIo)) {
+            effectsToMove = getEffectsForIo(srcIo);
+            for (size_t i = 0; i < effectsToMove.size(); ++i) {
+                const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+                effect->mEnabledWhenMoved = effect->mEnabled;
+                previousInputDesc->trackEffectEnabled(effect, false);
+            }
+        } else {
+            ALOGW("%s: no effect descriptor for srcIo %d", __func__, srcIo);
+        }
+    }
+    moveEffects(session, srcIo, dstIo, clientInterface);
+
+    if (dstIo != AUDIO_IO_HANDLE_NONE) {
+        if (const sp<AudioInputDescriptor>& inputDesc = inputs->valueFor(dstIo)) {
+            for (size_t i = 0; i < effectsToMove.size(); ++i) {
+                const sp<EffectDescriptor>& effect = effectsToMove.valueAt(i);
+                inputDesc->trackEffectEnabled(effect, effect->mEnabledWhenMoved);
+            }
+        } else {
+            ALOGW("%s: no effect descriptor for dstIo %d", __func__, dstIo);
+        }
+    }
+}
+
+void EffectDescriptorCollection::putOrphanEffects(audio_session_t session,
+        audio_io_handle_t srcIo, const AudioInputCollection *inputs,
+        AudioPolicyClientInterface *clientInterface)
+{
+    if (getIoForSession(session) != srcIo) {
+        // Effect session not held by this client io handle
+        return;
+    }
+    ALOGV("%s: park effects for session %d and io=%d to orphans", __func__, session, srcIo);
+    moveEffects(session, srcIo, AUDIO_IO_HANDLE_NONE, inputs, clientInterface);
+}
+
 EffectDescriptorCollection EffectDescriptorCollection::getEffectsForIo(audio_io_handle_t io) const
 {
     EffectDescriptorCollection effects;
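Putting the pieces together, a self-contained sketch of the park / restore flow (park and restore below are illustrative helpers, not the real moveEffects overloads) could look like this:

```cpp
// Sketch: putOrphanEffects() detaches a session's effects when its input goes
// away; moveEffectsForIo() re-attaches them to the next input opened for that
// session, restoring the enabled state saved when they were parked.
#include <iostream>
#include <vector>

constexpr int kIoNone = -1;       // stands in for AUDIO_IO_HANDLE_NONE

struct EffectLite {
    int session;
    int io;
    bool orphan = false;
    bool enabled = true;
    bool enabledWhenMoved = false;
};

static void park(std::vector<EffectLite>& fx, int session, int srcIo) {
    for (auto& e : fx) {
        if (e.session == session && e.io == srcIo) {
            e.enabledWhenMoved = e.enabled;   // remember state before detaching
            e.io = kIoNone;
            e.orphan = true;
        }
    }
}

static void restore(std::vector<EffectLite>& fx, int session, int dstIo) {
    for (auto& e : fx) {
        if (e.session == session && e.orphan) {
            e.io = dstIo;
            e.orphan = false;
            e.enabled = e.enabledWhenMoved;   // re-enable on the new input
        }
    }
}

int main() {
    std::vector<EffectLite> fx{{/*session=*/101, /*io=*/13}};
    park(fx, 101, 13);            // releaseInput(): input 13 is closed
    restore(fx, 101, 14);         // getInputForAttr(): new input 14 for the session
    std::cout << "effect now on io " << fx[0].io
              << (fx[0].enabled ? " (enabled)" : "") << "\n";
    return 0;
}
```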
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
new file mode 100644
index 0000000..38a2cde
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -0,0 +1,74 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+
+cc_defaults {
+    name: "audiopolicy_aidl_fuzzer_defaults",
+    shared_libs: [
+        "audiopolicy-aidl-cpp",
+        "audiopolicy-types-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudiopolicy",
+        "libaudiopolicymanagerdefault",
+        "libactivitymanager_aidl",
+        "libaudiohal",
+        "libaudiopolicyservice",
+        "libaudioflinger",
+        "libaudioclient",
+        "libaudioprocessing",
+        "libhidlbase",
+        "liblog",
+        "libmediautils",
+        "libnblog",
+        "libnbaio",
+        "libpowermanager",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
+    ],
+    static_libs: [
+        "libfakeservicemanager",
+        "libmediaplayerservice",
+    ],
+    header_libs: [
+        "libaudiohal_headers",
+        "libaudioflinger_headers",
+        "libaudiopolicymanager_interface_headers",
+        "libbinder_headers",
+        "libmedia_headers",
+    ],
+    fuzz_config: {
+        cc: [
+            "android-media-fuzzing-reports@google.com",
+        ],
+        componentid: 155276,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libaudiopolicy",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
+}
+
+cc_fuzz {
+    name: "audiopolicy_aidl_fuzzer",
+    srcs: ["audiopolicy_aidl_fuzzer.cpp"],
+    defaults: [
+        "audiopolicy_aidl_fuzzer_defaults",
+        "service_fuzzer_defaults",
+    ],
+}
diff --git a/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
new file mode 100644
index 0000000..ca79c49
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/audiopolicy_aidl_fuzzer.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <AudioFlinger.h>
+#include <android-base/logging.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_process.h>
+#include <android/media/IAudioPolicyService.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/libbinder_driver.h>
+#include <fuzzbinder/random_binder.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/IAudioFlinger.h>
+#include <service/AudioPolicyService.h>
+
+using namespace android;
+using namespace android::binder;
+using namespace android::hardware;
+using android::fuzzService;
+
+[[clang::no_destroy]] static std::once_flag gSmOnce;
+sp<FakeServiceManager> gFakeServiceManager;
+
+bool addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider& fdp) {
+    sp<IBinder> binder = getRandomBinder(&fdp);
+    if (binder == nullptr) {
+        return false;
+    }
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return true;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        gFakeServiceManager = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(gFakeServiceManager);
+    });
+    gFakeServiceManager->clear();
+
+    for (const char* service :
+         {"activity", "sensor_privacy", "permission", "scheduling_policy",
+          "android.hardware.audio.core.IConfig", "batterystats", "media.metrics"}) {
+        if (!addService(String16(service), gFakeServiceManager, fdp)) {
+            return 0;
+        }
+    }
+
+    const auto audioFlinger = sp<AudioFlinger>::make();
+    const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(
+                     String16(IAudioFlinger::DEFAULT_SERVICE_NAME), IInterface::asBinder(afAdapter),
+                     false /* allowIsolated */, IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    AudioSystem::get_audio_flinger_for_fuzzer();
+    const auto audioPolicyService = sp<AudioPolicyService>::make();
+
+    CHECK_EQ(NO_ERROR,
+             gFakeServiceManager->addService(String16("media.audio_policy"), audioPolicyService,
+                                             false /* allowIsolated */,
+                                             IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+
+    fuzzService(media::IAudioPolicyService::asBinder(audioPolicyService),
+                FuzzedDataProvider(data, size));
+
+    return 0;
+}
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 44648de..884ad96 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -337,7 +337,7 @@
                     outputsToReopenWithDevices.emplace(mOutputs.keyAt(i), newDevices);
                     continue;
                 }
-                setOutputDevices(desc, newDevices, force, 0);
+                setOutputDevices(__func__, desc, newDevices, force, 0);
             }
             if (!desc->isDuplicated() && desc->mProfile->hasDynamicAudioProfile() &&
                     !activeMediaDevices.empty() && desc->devices() != activeMediaDevices &&
@@ -459,9 +459,9 @@
 status_t AudioPolicyManager::deviceToAudioPort(audio_devices_t device, const char* device_address,
                                                const char* device_name,
                                                media::AudioPortFw* aidlPort) {
-    DeviceDescriptorBase devDescr(device, device_address);
-    devDescr.setName(device_name);
-    return devDescr.writeToParcelable(aidlPort);
+    const auto devDescr = sp<DeviceDescriptorBase>::make(device, device_address);
+    devDescr->setName(device_name);
+    return devDescr->writeToParcelable(aidlPort);
 }
 
 void AudioPolicyManager::setEngineDeviceConnectionState(const sp<DeviceDescriptor> device,
@@ -728,7 +728,7 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
-        muteWaitMs = setOutputDevices(mPrimaryOutput, rxDevices, true, delayMs);
+        muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
         connectTelephonyRxAudioSource();
         // If the TX device is on the primary HW module but RX device is
@@ -888,7 +888,7 @@
                 disconnectTelephonyAudioSource(mCallRxSourceClient);
                 disconnectTelephonyAudioSource(mCallTxSourceClient);
             }
-            setOutputDevices(mPrimaryOutput, rxDevices, force, 0);
+            setOutputDevices(__func__, mPrimaryOutput, rxDevices, force, 0);
         }
     }
 
@@ -906,7 +906,7 @@
                 outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                 continue;
             }
-            setOutputDevices(desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
+            setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
     }
@@ -1325,10 +1325,15 @@
         AudioProfileVector profiles;
         status_t ret = getProfilesForDevices(outputDevices, profiles, *flags, false /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         return INVALID_OPERATION;
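A small sketch of the revised suggestion logic (suggest is an illustrative helper over plain unsigned values, not the AudioProfile API); the same change is applied to the input path further below:

```cpp
// Sketch: keep the requested channel mask / sample rate when the matching
// profile already supports it; only fall back to the profile's first value
// when the requested one is unsupported.
#include <cassert>
#include <set>

static unsigned suggest(unsigned requested, const std::set<unsigned>& supported) {
    if (supported.empty() || supported.count(requested) != 0) {
        return requested;                  // nothing to fix up
    }
    return *supported.begin();             // first supported value as the suggestion
}

int main() {
    const std::set<unsigned> rates{44100, 48000, 96000};
    assert(suggest(48000, rates) == 48000);   // previously overwritten with 44100
    assert(suggest(22050, rates) == 44100);
    assert(suggest(48000, {}) == 48000);      // unknown capabilities: leave untouched
    return 0;
}
```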
@@ -2334,7 +2339,8 @@
             return DEAD_OBJECT;
         }
         const uint32_t muteWaitMs =
-                setOutputDevices(outputDesc, devices, force, 0, nullptr, requiresMuteCheck);
+                setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
+                                 requiresMuteCheck);
 
         // apply volume rules for current stream and device if necessary
         auto &curves = getVolumeCurves(client->attributes());
@@ -2417,7 +2423,7 @@
                     outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                     continue;
                 }
-                setOutputDevices(desc, newDevices, force, delayMs);
+                setOutputDevices(__func__, desc, newDevices, force, delayMs);
                 // re-apply device specific volume if not done by setOutputDevice()
                 if (!force) {
                     applyStreamVolumes(desc, newDevices.types(), delayMs);
@@ -2519,7 +2525,7 @@
             // still contain data that needs to be drained. The latency only covers the audio HAL
             // and kernel buffers. Also the latency does not always include additional delay in the
             // audio path (audio DSP, CODEC ...)
-            setOutputDevices(outputDesc, newDevices, false, outputDesc->latency()*2,
+            setOutputDevices(__func__, outputDesc, newDevices, false, outputDesc->latency()*2,
                              nullptr, true /*requiresMuteCheck*/, requiresVolumeCheck);
 
             // force restoring the device selection on other active outputs if it differs from the
@@ -2542,7 +2548,7 @@
                         outputsToReopen.emplace(mOutputs.keyAt(i), newDevices2);
                         continue;
                     }
-                    setOutputDevices(desc, newDevices2, force, delayMs);
+                    setOutputDevices(__func__, desc, newDevices2, force, delayMs);
 
                     // re-apply device specific volume if not done by setOutputDevice()
                     if (!force) {
@@ -2645,6 +2651,7 @@
     sp<AudioPolicyMix> policyMix;
     sp<DeviceDescriptor> device;
     sp<AudioInputDescriptor> inputDesc;
+    sp<AudioInputDescriptor> previousInputDesc;
     sp<RecordClientDescriptor> clientDesc;
     audio_port_handle_t requestedDeviceId = *selectedDeviceId;
     uid_t uid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(attributionSource.uid));
@@ -2774,10 +2781,15 @@
         status_t ret = getProfilesForDevices(
                 DeviceVector(device), profiles, flags, true /*isInput*/);
         if (ret == NO_ERROR && !profiles.empty()) {
-            config->channel_mask = profiles[0]->getChannels().empty() ? config->channel_mask
-                    : *profiles[0]->getChannels().begin();
-            config->sample_rate = profiles[0]->getSampleRates().empty() ? config->sample_rate
-                    : *profiles[0]->getSampleRates().begin();
+            const auto channels = profiles[0]->getChannels();
+            if (!channels.empty() && (channels.find(config->channel_mask) == channels.end())) {
+                config->channel_mask = *channels.begin();
+            }
+            const auto sampleRates = profiles[0]->getSampleRates();
+            if (!sampleRates.empty() &&
+                    (sampleRates.find(config->sample_rate) == sampleRates.end())) {
+                config->sample_rate = *sampleRates.begin();
+            }
             config->format = profiles[0]->getFormat();
         }
         goto error;
@@ -2796,6 +2808,8 @@
                                             requestedDeviceId, attributes.source, flags,
                                             isSoundTrigger);
     inputDesc = mInputs.valueFor(*input);
+    // Move effects (if any) for the client session to this newly selected input
+    mEffects.moveEffectsForIo(session, *input, &mInputs, mpClientInterface);
     inputDesc->addClient(clientDesc);
 
     ALOGV("getInputForAttr() returns input %d type %d selectedDeviceId %d for port ID %d",
@@ -3105,7 +3119,7 @@
     ALOGV("%s %d", __FUNCTION__, input);
 
     inputDesc->removeClient(portId);
-
+    mEffects.putOrphanEffects(client->session(), input, &mInputs, mpClientInterface);
     if (inputDesc->getClientCount() > 0) {
         ALOGV("%s(%d) %zu clients remaining", __func__, portId, inputDesc->getClientCount());
         return;
@@ -3467,8 +3481,8 @@
     }
 
     if (output != mMusicEffectOutput) {
-        mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
-        mpClientInterface->moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output);
+        mEffects.moveEffects(AUDIO_SESSION_OUTPUT_MIX, mMusicEffectOutput, output,
+                mpClientInterface);
         mMusicEffectOutput = output;
     }
 
@@ -3938,8 +3952,9 @@
                 outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
                 continue;
             }
-            waitMs = setOutputDevices(outputDesc, newDevices, forceRouting, delayMs, nullptr,
-                                      !skipDelays /*requiresMuteCheck*/,
+
+            waitMs = setOutputDevices(__func__, outputDesc, newDevices, forceRouting, delayMs,
+                                       nullptr, !skipDelays /*requiresMuteCheck*/,
                                       !forceRouting /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
@@ -4926,7 +4941,7 @@
         // TODO: reconfigure output format and channels here
         ALOGV("%s setting device %s on output %d",
               __func__, dumpDeviceTypes(devices.types()).c_str(), outputDesc->mIoHandle);
-        setOutputDevices(outputDesc, devices, true, 0, handle);
+        setOutputDevices(__func__, outputDesc, devices, true, 0, handle);
         index = mAudioPatches.indexOfKey(*handle);
         if (index >= 0) {
             if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) {
@@ -5185,7 +5200,7 @@
             return BAD_VALUE;
         }
 
-        setOutputDevices(outputDesc,
+        setOutputDevices(__func__, outputDesc,
                          getNewOutputDevices(outputDesc, true /*fromCache*/),
                          true,
                          0,
@@ -5239,7 +5254,7 @@
             //      3 / Inactive Output previously hosting SwBridge that can be closed.
             bool updateDevice = outputDesc->isActive() || !sourceDesc->useSwBridge() ||
                     sourceDesc->canCloseOutput();
-            setOutputDevices(outputDesc,
+            setOutputDevices(__func__, outputDesc,
                              updateDevice ? getNewOutputDevices(outputDesc, true /*fromCache*/) :
                                             outputDesc->devices(),
                              force,
@@ -5376,7 +5391,7 @@
                 outputsToReopen.emplace(mOutputs.keyAt(j), newDevices);
                 continue;
             }
-            setOutputDevices(outputDesc, newDevices, false);
+            setOutputDevices(__func__, outputDesc, newDevices, false);
         }
     }
     reopenOutputsWithDevices(outputsToReopen);
@@ -6258,7 +6273,7 @@
                 outputDesc->close();
             } else {
                 addOutput(output, outputDesc);
-                setOutputDevices(outputDesc,
+                setOutputDevices(__func__, outputDesc,
                                  DeviceVector(supportedDevice),
                                  true,
                                  0,
@@ -6458,8 +6473,8 @@
                 if (device_distinguishes_on_address(deviceType)) {
                     ALOGV("checkOutputsForDevice(): setOutputDevices %s",
                             device->toString().c_str());
-                    setOutputDevices(desc, DeviceVector(device), true/*force*/, 0/*delay*/,
-                                     NULL/*patch handle*/);
+                    setOutputDevices(__func__, desc, DeviceVector(device), true/*force*/,
+                                      0/*delay*/, NULL/*patch handle*/);
                 }
                 ALOGV("checkOutputsForDevice(): adding output %d", output);
             }
@@ -7342,7 +7357,7 @@
                 if (!desc->supportedDevices().containsAtLeastOne(outputDesc->supportedDevices())) {
                     continue;
                 }
-                ALOGVV("%s() %s (curDevice %s)", __func__,
+                ALOGVV("%s() output %s %s (curDevice %s)", __func__, desc->info().c_str(),
                       mute ? "muting" : "unmuting", curDevices.toString().c_str());
                 setStrategyMute(productStrategy, mute, desc, mute ? 0 : delayMs);
                 if (desc->isStrategyActive(productStrategy)) {
@@ -7395,7 +7410,8 @@
     return 0;
 }
 
-uint32_t AudioPolicyManager::setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+uint32_t AudioPolicyManager::setOutputDevices(const char *caller,
+                                              const sp<SwAudioOutputDescriptor>& outputDesc,
                                               const DeviceVector &devices,
                                               bool force,
                                               int delayMs,
@@ -7404,13 +7420,15 @@
                                               bool skipMuteDelay)
 {
     // TODO(b/262404095): Consider if the output need to be reopened.
-    ALOGV("%s device %s delayMs %d", __func__, devices.toString().c_str(), delayMs);
+    std::string logPrefix = std::string("caller ") + caller + outputDesc->info();
+    ALOGV("%s %s device %s delayMs %d", __func__, logPrefix.c_str(),
+          devices.toString().c_str(), delayMs);
     uint32_t muteWaitMs;
 
     if (outputDesc->isDuplicated()) {
-        muteWaitMs = setOutputDevices(outputDesc->subOutput1(), devices, force, delayMs,
+        muteWaitMs = setOutputDevices(__func__, outputDesc->subOutput1(), devices, force, delayMs,
                 nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
-        muteWaitMs += setOutputDevices(outputDesc->subOutput2(), devices, force, delayMs,
+        muteWaitMs += setOutputDevices(__func__, outputDesc->subOutput2(), devices, force, delayMs,
                 nullptr /* patchHandle */, requiresMuteCheck, skipMuteDelay);
         return muteWaitMs;
     }
@@ -7420,7 +7438,8 @@
     DeviceVector prevDevices = outputDesc->devices();
     DeviceVector availPrevDevices = mAvailableOutputDevices.filter(prevDevices);
 
-    ALOGV("setOutputDevices() prevDevice %s", prevDevices.toString().c_str());
+    ALOGV("%s %s prevDevice %s", __func__, logPrefix.c_str(),
+          prevDevices.toString().c_str());
 
     if (!filteredDevices.isEmpty()) {
         outputDesc->setDevices(filteredDevices);
@@ -7430,7 +7449,8 @@
     if (requiresMuteCheck) {
         muteWaitMs = checkDeviceMuteStrategies(outputDesc, prevDevices, delayMs);
     } else {
-        ALOGV("%s: suppressing checkDeviceMuteStrategies", __func__);
+        ALOGV("%s: %s suppressing checkDeviceMuteStrategies", __func__,
+              logPrefix.c_str());
         muteWaitMs = 0;
     }
 
@@ -7440,7 +7460,8 @@
     // output profile or if new device is not supported AND previous device(s) is(are) still
     // available (otherwise reset device must be done on the output)
     if (!devices.isEmpty() && filteredDevices.isEmpty() && !availPrevDevices.empty()) {
-        ALOGV("%s: unsupported device %s for output", __func__, devices.toString().c_str());
+        ALOGV("%s: %s unsupported device %s for output", __func__, logPrefix.c_str(),
+              devices.toString().c_str());
         // restore previous device after evaluating strategy mute state
         outputDesc->setDevices(prevDevices);
         return muteWaitMs;
@@ -7453,16 +7474,19 @@
     //  AND the output is connected by a valid audio patch.
     // Doing this check here allows the caller to call setOutputDevices() without conditions
     if ((filteredDevices.isEmpty() || filteredDevices == prevDevices) && !force && outputRouted) {
-        ALOGV("%s setting same device %s or null device, force=%d, patch handle=%d", __func__,
-              filteredDevices.toString().c_str(), force, outputDesc->getPatchHandle());
+        ALOGV("%s %s setting same device %s or null device, force=%d, patch handle=%d",
+              __func__, logPrefix.c_str(), filteredDevices.toString().c_str(), force,
+              outputDesc->getPatchHandle());
         if (requiresVolumeCheck && !filteredDevices.isEmpty()) {
-            ALOGV("%s setting same device on routed output, force apply volumes", __func__);
+            ALOGV("%s %s setting same device on routed output, force apply volumes",
+                  __func__, logPrefix.c_str());
             applyStreamVolumes(outputDesc, filteredDevices.types(), delayMs, true /*force*/);
         }
         return muteWaitMs;
     }
 
-    ALOGV("%s changing device to %s", __func__, filteredDevices.toString().c_str());
+    ALOGV("%s %s changing device to %s", __func__, logPrefix.c_str(),
+          filteredDevices.toString().c_str());
 
     // do the routing
     if (filteredDevices.isEmpty() || mAvailableOutputDevices.filter(filteredDevices).empty()) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 863c785..509cc79 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -526,6 +526,7 @@
 
         /**
          * @brief setOutputDevices change the route of the specified output.
+         * @param caller name of the calling function, used to tag the routing logs
          * @param outputDesc to be considered
          * @param device to be considered to route the output
          * @param force if true, force the routing even if no change.
@@ -539,7 +540,8 @@
          * @return the number of ms we have slept to allow new routing to take effect in certain
          *        cases.
          */
-        uint32_t setOutputDevices(const sp<SwAudioOutputDescriptor>& outputDesc,
+        uint32_t setOutputDevices(const char *caller,
+                                  const sp<SwAudioOutputDescriptor>& outputDesc,
                                   const DeviceVector &device,
                                   bool force = false,
                                   int delayMs = 0,
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index f4fc8f1..c674909 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -7,26 +7,8 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_library_shared {
-    name: "libaudiopolicyservice",
-
-    defaults: [
-        "latest_android_media_audio_common_types_cpp_shared",
-    ],
-
-    srcs: [
-        "AudioPolicyClientImpl.cpp",
-        "AudioPolicyEffects.cpp",
-        "AudioPolicyInterfaceImpl.cpp",
-        "AudioPolicyService.cpp",
-        "CaptureStateNotifier.cpp",
-        "Spatializer.cpp",
-        "SpatializerPoseController.cpp",
-    ],
-
-    include_dirs: [
-        "frameworks/av/services/audioflinger"
-    ],
+cc_defaults {
+    name: "libaudiopolicyservice_dependencies",
 
     shared_libs: [
         "libactivitymanager_aidl",
@@ -41,7 +23,6 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
-        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -67,6 +48,36 @@
     ],
 
     static_libs: [
+        "libeffectsconfig",
+        "libaudiopolicycomponents",
+    ]
+}
+
+cc_library {
+    name: "libaudiopolicyservice",
+
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+
+    srcs: [
+        "AudioRecordClient.cpp",
+        "AudioPolicyClientImpl.cpp",
+        "AudioPolicyEffects.cpp",
+        "AudioPolicyInterfaceImpl.cpp",
+        "AudioPolicyService.cpp",
+        "CaptureStateNotifier.cpp",
+        "Spatializer.cpp",
+        "SpatializerPoseController.cpp",
+    ],
+
+    include_dirs: [
+        "frameworks/av/services/audioflinger"
+    ],
+
+
+    static_libs: [
         "framework-permission-aidl-cpp",
     ],
 
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index 70a1785..85b7ad9 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -44,10 +44,7 @@
 AudioPolicyEffects::AudioPolicyEffects(const sp<EffectsFactoryHalInterface>& effectsFactoryHal) {
     // load xml config with effectsFactoryHal
     status_t loadResult = loadAudioEffectConfig(effectsFactoryHal);
-    if (loadResult == NO_ERROR) {
-        mDefaultDeviceEffectFuture =
-                std::async(std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
-    } else if (loadResult < 0) {
+    if (loadResult < 0) {
         ALOGW("Failed to query effect configuration, fallback to load .conf");
         // load automatic audio effect modules
         if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) {
@@ -60,6 +57,11 @@
     }
 }
 
+void AudioPolicyEffects::setDefaultDeviceEffects() {
+    mDefaultDeviceEffectFuture = std::async(
+                std::launch::async, &AudioPolicyEffects::initDefaultDeviceEffects, this);
+}
+
 AudioPolicyEffects::~AudioPolicyEffects()
 {
     size_t i = 0;
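A self-contained sketch of the deferral pattern introduced here (EffectsLite is an illustrative stand-in for AudioPolicyEffects):

```cpp
// Sketch: instead of launching default device effect creation from the
// constructor, the asynchronous work is started later, once the service it
// depends on is known to be up.
#include <future>
#include <iostream>

struct EffectsLite {
    std::future<void> pending;

    void initDefaultDeviceEffects() {
        std::cout << "creating default device effects\n";
    }
    void setDefaultDeviceEffects() {
        pending = std::async(std::launch::async,
                             &EffectsLite::initDefaultDeviceEffects, this);
    }
};

int main() {
    EffectsLite effects;               // constructor no longer starts the async work
    // ... service publication / audioPolicyReady() happens here ...
    effects.setDefaultDeviceEffects(); // now safe: effects can register with the service
    effects.pending.wait();
    return 0;
}
```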
diff --git a/services/audiopolicy/service/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h
index 9f65a96..e17df48 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.h
+++ b/services/audiopolicy/service/AudioPolicyEffects.h
@@ -117,6 +117,8 @@
     // Remove the default stream effect from wherever it's attached.
     status_t removeStreamDefaultEffect(audio_unique_id_t id);
 
+    void setDefaultDeviceEffects();
+
 private:
     void initDefaultDeviceEffects();
 
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 5d86e7c..a7b2a56 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -18,6 +18,7 @@
 //#define LOG_NDEBUG 0
 
 #include "AudioPolicyService.h"
+#include "AudioRecordClient.h"
 #include "TypeConverter.h"
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 15aced0..72fa245 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,7 +25,6 @@
 #include <sys/time.h>
 #include <dlfcn.h>
 
-#include <android/content/pm/IPackageManagerNative.h>
 #include <audio_utils/clock.h>
 #include <binder/IServiceManager.h>
 #include <utils/Log.h>
@@ -35,6 +34,7 @@
 #include <binder/IResultReceiver.h>
 #include <utils/String16.h>
 #include <utils/threads.h>
+#include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
 #include <hardware_legacy/power.h>
 #include <media/AidlConversion.h>
@@ -216,27 +216,6 @@
 {
     delete interface;
 }
-
-namespace {
-int getTargetSdkForPackageName(std::string_view packageName) {
-    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
-    int targetSdk = -1;
-    if (binder != nullptr) {
-        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
-        if (pm != nullptr) {
-            const auto status = pm->getTargetSdkVersionForPackage(
-                    String16{packageName.data(), packageName.size()}, &targetSdk);
-            ALOGI("Capy check package %s, sdk %d", packageName.data(), targetSdk);
-            return status.isOk() ? targetSdk : -1;
-        }
-    }
-    return targetSdk;
-}
-
-bool doesPackageTargetAtLeastU(std::string_view packageName) {
-    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
-}
-} // anonymous
 // ----------------------------------------------------------------------------
 
 AudioPolicyService::AudioPolicyService()
@@ -327,6 +306,9 @@
         }
     }
     AudioSystem::audioPolicyReady();
+    // AudioFlinger handles the effect creation and registers these effects with the
+    // audio_policy service, hence the audio_policy service must be ready at this point.
+    audioPolicyEffects->setDefaultDeviceEffects();
 }
 
 void AudioPolicyService::unloadAudioPolicyManager()
@@ -1186,20 +1168,6 @@
     return false;
 }
 
-/* static */
-bool AudioPolicyService::isAppOpSource(audio_source_t source)
-{
-    switch (source) {
-        case AUDIO_SOURCE_FM_TUNER:
-        case AUDIO_SOURCE_ECHO_REFERENCE:
-        case AUDIO_SOURCE_REMOTE_SUBMIX:
-            return false;
-        default:
-            break;
-    }
-    return true;
-}
-
 void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
 {
     AutoCallerClear acc;
@@ -1900,113 +1868,6 @@
     return binder::Status::ok();
 }
 
-// -----------  AudioPolicyService::OpRecordAudioMonitor implementation ----------
-
-// static
-sp<AudioPolicyService::OpRecordAudioMonitor>
-AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
-            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
-            wp<AudioCommandThread> commandThread)
-{
-    if (isAudioServerOrRootUid(attributionSource.uid)) {
-        ALOGV("not silencing record for audio or root source %s",
-                attributionSource.toString().c_str());
-        return nullptr;
-    }
-
-    if (!AudioPolicyService::isAppOpSource(attr.source)) {
-        ALOGD("not monitoring app op for uid %d and source %d",
-                attributionSource.uid, attr.source);
-        return nullptr;
-    }
-
-    if (!attributionSource.packageName.has_value()
-            || attributionSource.packageName.value().size() == 0) {
-        return nullptr;
-    }
-    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
-}
-
-AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
-        const AttributionSourceState& attributionSource, int32_t appOp,
-        wp<AudioCommandThread> commandThread) :
-            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
-            mCommandThread(commandThread)
-{
-}
-
-AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
-    if (mOpCallback != 0) {
-        mAppOpsManager.stopWatchingMode(mOpCallback);
-    }
-    mOpCallback.clear();
-}
-
-void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
-{
-    checkOp();
-    mOpCallback = new RecordAudioOpCallback(this);
-    ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
-    int flags = doesPackageTargetAtLeastU(
-            mAttributionSource.packageName.value_or("")) ?
-            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
-    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-        mAttributionSource.packageName.value_or(""))),
-        flags,
-        mOpCallback);
-}
-
-bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
-    return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
-{
-    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
-            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-                mAttributionSource.packageName.value_or(""))));
-    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
-    // verbose logging only log when appOp changed
-    ALOGI_IF(hasIt != mHasOp.load(),
-            "App op %d missing, %ssilencing record %s",
-            mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
-    mHasOp.store(hasIt);
-
-    if (updateUidStates) {
-          sp<AudioCommandThread> commandThread = mCommandThread.promote();
-          if (commandThread != nullptr) {
-              commandThread->updateUidStatesCommand();
-          }
-    }
-}
-
-AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
-        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
-            const String16& packageName __unused) {
-    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
-    if (monitor != NULL) {
-        if (op != monitor->getOp()) {
-            return;
-        }
-        monitor->checkOp(true);
-    }
-}
-
-
 // -----------  AudioPolicyService::AudioCommandThread implementation ----------
 
 AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index d0cde64..94b48ea 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -1,4 +1,3 @@
-
 /*
  * Copyright (C) 2009 The Android Open Source Project
  *
@@ -28,7 +27,6 @@
 #include <utils/Vector.h>
 #include <utils/SortedVector.h>
 #include <binder/ActivityManager.h>
-#include <binder/AppOpsManager.h>
 #include <binder/BinderService.h>
 #include <binder/IUidObserver.h>
 #include <system/audio.h>
@@ -64,6 +62,12 @@
 
 // ----------------------------------------------------------------------------
 
+namespace media::audiopolicy {
+    class AudioRecordClient;
+}
+
+using ::android::media::audiopolicy::AudioRecordClient;
+
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
     public media::BnAudioPolicyService,
@@ -401,7 +405,6 @@
     // Handles binder shell commands
     virtual status_t shellCommand(int in, int out, int err, Vector<String16>& args);
 
-    class AudioRecordClient;
 
     // Sets whether the given UID records only silence
     virtual void setAppState_l(sp<AudioRecordClient> client, app_state_t state) REQUIRES(mLock);
@@ -542,6 +545,7 @@
     // Thread used to send audio config commands to audio flinger
     // For audio config commands, it is necessary because audio flinger requires that the calling
     // process (user) has permission to modify audio settings.
+    public:
     class AudioCommandThread : public Thread {
         class AudioCommand;
     public:
@@ -732,6 +736,7 @@
         wp<AudioPolicyService> mService;
     };
 
+    private:
     class AudioPolicyClient : public AudioPolicyClientInterface
     {
      public:
@@ -906,6 +911,7 @@
               bool                                 mAudioVolumeGroupCallbacksEnabled;
     };
 
+    public:
     class AudioClient : public virtual RefBase {
     public:
                 AudioClient(const audio_attributes_t attributes,
@@ -927,82 +933,8 @@
         const audio_port_handle_t deviceId;  // selected input device port ID
               bool active;                   // Playback/Capture is active or inactive
     };
-
-    // Checks and monitors app ops for AudioRecordClient
-    class OpRecordAudioMonitor : public RefBase {
-    public:
-        ~OpRecordAudioMonitor() override;
-        bool hasOp() const;
-        int32_t getOp() const { return mAppOp; }
-
-        static sp<OpRecordAudioMonitor> createIfNeeded(
-                const AttributionSourceState& attributionSource,
-                const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
-
     private:
-        OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
-                wp<AudioCommandThread> commandThread);
 
-        void onFirstRef() override;
-
-        AppOpsManager mAppOpsManager;
-
-        class RecordAudioOpCallback : public BnAppOpsCallback {
-        public:
-            explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
-            void opChanged(int32_t op, const String16& packageName) override;
-
-        private:
-            const wp<OpRecordAudioMonitor> mMonitor;
-        };
-
-        sp<RecordAudioOpCallback> mOpCallback;
-        // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
-        // in AppOp callback and in onFirstRef()
-        // updateUidStates is true when the silenced state of active AudioRecordClients must be
-        // re-evaluated
-        void checkOp(bool updateUidStates = false);
-
-        std::atomic_bool mHasOp;
-        const AttributionSourceState mAttributionSource;
-        const int32_t mAppOp;
-        wp<AudioCommandThread> mCommandThread;
-    };
-
-    // --- AudioRecordClient ---
-    // Information about each registered AudioRecord client
-    // (between calls to getInputForAttr() and releaseInput())
-    class AudioRecordClient : public AudioClient {
-    public:
-                AudioRecordClient(const audio_attributes_t attributes,
-                          const audio_io_handle_t io,
-                          const audio_session_t session, audio_port_handle_t portId,
-                          const audio_port_handle_t deviceId,
-                          const AttributionSourceState& attributionSource,
-                          bool canCaptureOutput, bool canCaptureHotword,
-                          wp<AudioCommandThread> commandThread) :
-                    AudioClient(attributes, io, attributionSource,
-                        session, portId, deviceId), attributionSource(attributionSource),
-                        startTimeNs(0), canCaptureOutput(canCaptureOutput),
-                        canCaptureHotword(canCaptureHotword), silenced(false),
-                        mOpRecordAudioMonitor(
-                                OpRecordAudioMonitor::createIfNeeded(attributionSource,
-                                attributes, commandThread)) {}
-                ~AudioRecordClient() override = default;
-
-        bool hasOp() const {
-            return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
-        }
-
-        const AttributionSourceState attributionSource; // attribution source of client
-        nsecs_t startTimeNs;
-        const bool canCaptureOutput;
-        const bool canCaptureHotword;
-        bool silenced;
-
-    private:
-        sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
-    };
 
 
     // --- AudioPlaybackClient ---
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
new file mode 100644
index 0000000..a89a84d
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/content/pm/IPackageManagerNative.h>
+
+#include "AudioRecordClient.h"
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using android::AudioPolicyService;
+
+namespace {
+bool isAppOpSource(audio_source_t source)
+{
+    switch (source) {
+        case AUDIO_SOURCE_FM_TUNER:
+        case AUDIO_SOURCE_ECHO_REFERENCE:
+        case AUDIO_SOURCE_REMOTE_SUBMIX:
+            return false;
+        default:
+            break;
+    }
+    return true;
+}
+
+int getTargetSdkForPackageName(std::string_view packageName) {
+    const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+    int targetSdk = -1;
+    if (binder != nullptr) {
+        const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+        if (pm != nullptr) {
+            const auto status = pm->getTargetSdkVersionForPackage(
+                    String16{packageName.data(), packageName.size()}, &targetSdk);
+            return status.isOk() ? targetSdk : -1;
+        }
+    }
+    return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+    return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous namespace
+
+// static
+sp<OpRecordAudioMonitor>
+OpRecordAudioMonitor::createIfNeeded(
+            const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+            wp<AudioPolicyService::AudioCommandThread> commandThread)
+{
+    if (isAudioServerOrRootUid(attributionSource.uid)) {
+        ALOGV("not silencing record for audio or root source %s",
+                attributionSource.toString().c_str());
+        return nullptr;
+    }
+
+    if (!isAppOpSource(attr.source)) {
+        ALOGD("not monitoring app op for uid %d and source %d",
+                attributionSource.uid, attr.source);
+        return nullptr;
+    }
+
+    if (!attributionSource.packageName.has_value()
+            || attributionSource.packageName.value().size() == 0) {
+        return nullptr;
+    }
+    return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+OpRecordAudioMonitor::OpRecordAudioMonitor(
+        const AttributionSourceState& attributionSource, int32_t appOp,
+        wp<AudioPolicyService::AudioCommandThread> commandThread) :
+            mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+            mCommandThread(commandThread)
+{
+}
+
+OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+    if (mOpCallback != 0) {
+        mAppOpsManager.stopWatchingMode(mOpCallback);
+    }
+    mOpCallback.clear();
+}
+
+void OpRecordAudioMonitor::onFirstRef()
+{
+    checkOp();
+    mOpCallback = new RecordAudioOpCallback(this);
+    ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+
+    int flags = doesPackageTargetAtLeastU(
+            mAttributionSource.packageName.value_or("")) ?
+            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
+    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+        mAttributionSource.packageName.value_or(""))),
+        flags,
+        mOpCallback);
+}
+
+bool OpRecordAudioMonitor::hasOp() const {
+    return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// is updated in the AppOp callback and in onFirstRef().
+// Note this method is never called (nor will it ever be) for audio server / root tracks,
+// due to the UID check in createIfNeeded(). As a result, for those record tracks it is:
+// - not called from the constructor,
+// - not called from RecordAudioOpCallback, because the callback is not installed in that case
+void OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+    // since it controls the mic permission for legacy apps.
+    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                mAttributionSource.packageName.value_or(""))));
+    const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+    // verbose logging: only log when the appOp changed
+    ALOGI_IF(hasIt != mHasOp.load(),
+            "App op %d missing, %ssilencing record %s",
+            mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+    mHasOp.store(hasIt);
+
+    if (updateUidStates) {
+        sp<AudioPolicyService::AudioCommandThread> commandThread = mCommandThread.promote();
+        if (commandThread != nullptr) {
+            commandThread->updateUidStatesCommand();
+        }
+    }
+}
+
+OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+        const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+            const String16& packageName __unused) {
+    sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+    if (monitor != NULL) {
+        if (op != monitor->getOp()) {
+            return;
+        }
+        monitor->checkOp(true);
+    }
+}
+
+} // namespace android::media::audiopolicy
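For reference, a minimal standalone sketch of the synchronous app-op check that checkOp() above performs, using only the AppOpsManager calls already present in this change; the helper name and its anonymous-namespace wrapper are illustrative, not part of the patch:

    // Hypothetical helper, not part of this change: synchronous check of an app
    // op for a given uid/package, mirroring OpRecordAudioMonitor::checkOp().
    #include <sys/types.h>
    #include <binder/AppOpsManager.h>
    #include <utils/String16.h>

    namespace {
    bool isOpAllowed(int32_t op, uid_t uid, const android::String16& packageName) {
        android::AppOpsManager appOps;
        // MODE_ALLOWED means the op is granted; anything else leads to silencing.
        return appOps.checkOp(op, static_cast<int32_t>(uid), packageName)
                == android::AppOpsManager::MODE_ALLOWED;
    }
    } // anonymous namespace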
diff --git a/services/audiopolicy/service/AudioRecordClient.h b/services/audiopolicy/service/AudioRecordClient.h
new file mode 100644
index 0000000..d3be316
--- /dev/null
+++ b/services/audiopolicy/service/AudioRecordClient.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/content/AttributionSourceState.h>
+#include <binder/AppOpsManager.h>
+#include <system/audio.h>
+#include <utils/RefBase.h>
+
+#include <cstdint>
+
+#include "AudioPolicyService.h"
+
+namespace android::media::audiopolicy {
+
+using ::android::content::AttributionSourceState;
+
+// Checks and monitors app ops for AudioRecordClient
+class OpRecordAudioMonitor : public RefBase {
+public:
+    ~OpRecordAudioMonitor() override;
+    bool hasOp() const;
+    int32_t getOp() const { return mAppOp; }
+
+    static sp<OpRecordAudioMonitor> createIfNeeded(
+            const AttributionSourceState& attributionSource,
+            const audio_attributes_t& attr,
+            wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+private:
+    OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+            wp<AudioPolicyService::AudioCommandThread> commandThread);
+
+    void onFirstRef() override;
+
+    AppOpsManager mAppOpsManager;
+
+    class RecordAudioOpCallback : public BnAppOpsCallback {
+    public:
+        explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+        void opChanged(int32_t op, const String16& packageName) override;
+
+    private:
+        const wp<OpRecordAudioMonitor> mMonitor;
+    };
+
+    sp<RecordAudioOpCallback> mOpCallback;
+    // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
+    // in AppOp callback and in onFirstRef()
+    // updateUidStates is true when the silenced state of active AudioRecordClients must be
+    // re-evaluated
+    void checkOp(bool updateUidStates = false);
+
+    std::atomic_bool mHasOp;
+    const AttributionSourceState mAttributionSource;
+    const int32_t mAppOp;
+    wp<AudioPolicyService::AudioCommandThread> mCommandThread;
+};
+
+// --- AudioRecordClient ---
+// Information about each registered AudioRecord client
+// (between calls to getInputForAttr() and releaseInput())
+class AudioRecordClient : public AudioPolicyService::AudioClient {
+public:
+            AudioRecordClient(const audio_attributes_t attributes,
+                      const audio_io_handle_t io,
+                      const audio_session_t session, audio_port_handle_t portId,
+                      const audio_port_handle_t deviceId,
+                      const AttributionSourceState& attributionSource,
+                      bool canCaptureOutput, bool canCaptureHotword,
+                      wp<AudioPolicyService::AudioCommandThread> commandThread) :
+                AudioClient(attributes, io, attributionSource,
+                    session, portId, deviceId), attributionSource(attributionSource),
+                    startTimeNs(0), canCaptureOutput(canCaptureOutput),
+                    canCaptureHotword(canCaptureHotword), silenced(false),
+                    mOpRecordAudioMonitor(
+                            OpRecordAudioMonitor::createIfNeeded(attributionSource,
+                            attributes, commandThread)) {}
+            ~AudioRecordClient() override = default;
+
+    bool hasOp() const {
+        return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+    }
+
+    const AttributionSourceState attributionSource; // attribution source of client
+    nsecs_t startTimeNs;
+    const bool canCaptureOutput;
+    const bool canCaptureHotword;
+    bool silenced;
+
+private:
+    sp<OpRecordAudioMonitor>           mOpRecordAudioMonitor;
+};
+
+} // namespace android::media::audiopolicy
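A hedged usage sketch of the relocated AudioRecordClient; the function and variable names below are placeholders for values AudioPolicyService already holds when handling getInputForAttr(), not code from this patch:

    // Illustrative only; assumes the types declared in AudioRecordClient.h above.
    #include "AudioRecordClient.h"

    using android::media::audiopolicy::AudioRecordClient;

    android::sp<AudioRecordClient> makeRecordClient(
            const audio_attributes_t& attr, audio_io_handle_t io, audio_session_t session,
            audio_port_handle_t portId, audio_port_handle_t deviceId,
            const android::content::AttributionSourceState& attributionSource,
            android::wp<android::AudioPolicyService::AudioCommandThread> commandThread) {
        // The app-op monitor is only attached when OpRecordAudioMonitor::createIfNeeded()
        // decides one is required for this attribution source and audio source.
        return new AudioRecordClient(
                attr, io, session, portId, deviceId, attributionSource,
                /*canCaptureOutput=*/false, /*canCaptureHotword=*/false, commandThread);
    }

    // Later, e.g. when deciding whether to deliver captured audio:
    //     const bool allowed = client->hasOp() && !client->silenced;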
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 15eae14..7a391b6 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -1905,7 +1905,7 @@
     audio_io_handle_t mOutput;
     audio_stream_type_t mStream = AUDIO_STREAM_DEFAULT;
     audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t mPortId;
+    audio_port_handle_t mPortId = AUDIO_PORT_HANDLE_NONE;
     AudioPolicyInterface::output_type_t mOutputType;
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     bool mIsSpatialized;
@@ -1948,14 +1948,18 @@
 }
 
 TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
-        MmapPlaybackStreamMatchingRenderDapMixSucceeds) {
-      // Add render-only mix matching the test uid.
+        MmapPlaybackStreamMatchingRenderDapMixSupportingMmapSucceeds) {
+    // Add render-only mix matching the test uid.
     const int testUid = 12345;
-    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER, AUDIO_DEVICE_OUT_SPEAKER,
-                                /*mixAddress=*/"", audioConfig, {createUidCriterion(testUid)});
+    // test_audio_policy_configuration.xml declares an mmap-capable mix port
+    // for AUDIO_DEVICE_OUT_USB_DEVICE.
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                                AUDIO_DEVICE_OUT_USB_DEVICE, /*mixAddress=*/"",
+                                audioConfig, {createUidCriterion(testUid)});
     ASSERT_EQ(NO_ERROR, ret);
 
-    // Geting output for matching uid should succeed for mmaped stream.
+    // Getting output for matching uid should succeed for an mmapped stream, because the
+    // matched mix redirects to an mmap-capable device.
     audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
     ASSERT_EQ(NO_ERROR,
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
@@ -1964,6 +1968,26 @@
                                          &mOutputType, &mIsSpatialized, &mIsBitPerfect));
 }
 
+TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
+        MmapPlaybackStreamMatchingRenderDapMixNotSupportingMmapFails) {
+    // Add render-only mix matching the test uid.
+    const int testUid = 12345;
+    // Per test_audio_policy_configuration.xml, AUDIO_DEVICE_OUT_SPEAKER doesn't support mmap.
+    status_t ret = addPolicyMix(MIX_TYPE_PLAYERS, MIX_ROUTE_FLAG_RENDER,
+                                AUDIO_DEVICE_OUT_SPEAKER, /*mixAddress=*/"", audioConfig,
+                                {createUidCriterion(testUid)});
+    ASSERT_EQ(NO_ERROR, ret);
+
+    // Getting output for matching uid should fail for an mmapped stream, because the
+    // matched mix redirects to a device which doesn't support mmap.
+    audio_output_flags_t outputFlags = AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+    ASSERT_EQ(INVALID_OPERATION,
+              mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
+                                         createAttributionSourceState(testUid), &audioConfig,
+                                         &outputFlags, &mSelectedDeviceId, &mPortId, {},
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+}
+
 INSTANTIATE_TEST_SUITE_P(
         MmapPlaybackRerouting, AudioPolicyManagerTestMMapPlaybackRerouting,
         testing::Values(DPMmapTestParam(MIX_ROUTE_FLAG_LOOP_BACK, AUDIO_DEVICE_OUT_REMOTE_SUBMIX,
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 50ca26a..7ab9519 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -55,6 +55,11 @@
                            samplingRates="8000 16000 32000 48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
                 <mixPort name="hifi_output" role="source" flags="AUDIO_OUTPUT_FLAG_BIT_PERFECT"/>
+                <mixPort name="mmap_no_irq_out" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -96,7 +101,7 @@
                 <route type="mix" sink="BT A2DP Out"
                        sources="primary output,hifi_output"/>
                 <route type="mix" sink="USB Device Out"
-                       sources="primary output,hifi_output"/>
+                       sources="primary output,hifi_output,mmap_no_irq_out"/>
             </routes>
         </module>
 
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index a45365a..84dcf26 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -35,9 +35,91 @@
     ],
 }
 
-cc_library_shared {
+cc_defaults {
+    name: "libcameraservice_deps",
+
+    shared_libs: [
+        "libactivitymanager_aidl",
+        "libbase",
+        "libdl",
+        "libui",
+        "liblog",
+        "libutilscallstack",
+        "libutils",
+        "libbinder",
+        "libbinder_ndk",
+        "libactivitymanager_aidl",
+        "libpermission",
+        "libcutils",
+        "libexif",
+        "libmedia",
+        "libmediautils",
+        "libcamera_client",
+        "libcamera_metadata",
+        "libfmq",
+        "libgui",
+        "libhardware",
+        "libhidlbase",
+        "libimage_io",
+        "libjpeg",
+        "libultrahdr",
+        "libmedia_codeclist",
+        "libmedia_omx",
+        "libmemunreachable",
+        "libprocessgroup",
+        "libprocinfo",
+        "libsensorprivacy",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libxml2",
+        "libyuv",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.camera.device@1.0",
+        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
+        "android.hardware.camera.device@3.4",
+        "android.hardware.camera.device@3.5",
+        "android.hardware.camera.device@3.6",
+        "android.hardware.camera.device@3.7",
+        "android.hardware.common-V2-ndk",
+        "android.hardware.common.fmq-V1-ndk",
+        "android.hardware.graphics.common-V4-ndk",
+        "media_permission-aidl-cpp",
+    ],
+
+    static_libs: [
+        "android.frameworks.cameraservice.common@2.0",
+        "android.frameworks.cameraservice.service@2.0",
+        "android.frameworks.cameraservice.service@2.1",
+        "android.frameworks.cameraservice.service@2.2",
+        "android.frameworks.cameraservice.device@2.0",
+        "android.frameworks.cameraservice.device@2.1",
+        "android.frameworks.cameraservice.common-V1-ndk",
+        "android.frameworks.cameraservice.service-V1-ndk",
+        "android.frameworks.cameraservice.device-V1-ndk",
+        "android.hardware.camera.common-V1-ndk",
+        "android.hardware.camera.device-V2-ndk",
+        "android.hardware.camera.metadata-V2-ndk",
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libaidlcommonsupport",
+        "libdynamic_depth",
+        "libprocessinfoservice_aidl",
+        "libbinderthreadstateutils",
+        "media_permission-aidl-cpp",
+        "libcameraservice_device_independent",
+    ],
+}
+
+cc_library {
     name: "libcameraservice",
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
     // Camera service source
 
     srcs: [
@@ -105,6 +187,7 @@
         "utils/CameraThreadState.cpp",
         "utils/CameraTraces.cpp",
         "utils/AutoConditionLock.cpp",
+        "utils/SchedulingPolicyUtils.cpp",
         "utils/SessionConfigurationUtils.cpp",
         "utils/SessionConfigurationUtilsHidl.cpp",
         "utils/SessionStatsBuilder.cpp",
@@ -119,73 +202,6 @@
         "libmediametrics_headers",
     ],
 
-    shared_libs: [
-        "libactivitymanager_aidl",
-        "libbase",
-        "libdl",
-        "libexif",
-        "libui",
-        "liblog",
-        "libutilscallstack",
-        "libutils",
-        "libbinder",
-        "libbinder_ndk",
-        "libactivitymanager_aidl",
-        "libpermission",
-        "libcutils",
-        "libmedia",
-        "libmediautils",
-        "libcamera_client",
-        "libcamera_metadata",
-        "libdynamic_depth",
-        "libfmq",
-        "libgui",
-        "libhardware",
-        "libhidlbase",
-        "libimage_io",
-        "libjpeg",
-        "libultrahdr",
-        "libmedia_codeclist",
-        "libmedia_omx",
-        "libmemunreachable",
-        "libsensorprivacy",
-        "libstagefright",
-        "libstagefright_foundation",
-        "libxml2",
-        "libyuv",
-        "android.frameworks.cameraservice.common@2.0",
-        "android.frameworks.cameraservice.service@2.0",
-        "android.frameworks.cameraservice.service@2.1",
-        "android.frameworks.cameraservice.service@2.2",
-        "android.frameworks.cameraservice.device@2.0",
-        "android.frameworks.cameraservice.device@2.1",
-        "android.frameworks.cameraservice.common-V1-ndk",
-        "android.frameworks.cameraservice.service-V1-ndk",
-        "android.frameworks.cameraservice.device-V1-ndk",
-        "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
-        "android.hardware.camera.device@3.2",
-        "android.hardware.camera.device@3.3",
-        "android.hardware.camera.device@3.4",
-        "android.hardware.camera.device@3.5",
-        "android.hardware.camera.device@3.6",
-        "android.hardware.camera.device@3.7",
-        "android.hardware.camera.device-V2-ndk",
-        "media_permission-aidl-cpp",
-    ],
-
-    static_libs: [
-        "libaidlcommonsupport",
-        "libprocessinfoservice_aidl",
-        "libbinderthreadstateutils",
-        "media_permission-aidl-cpp",
-        "libcameraservice_device_independent",
-    ],
-
     export_shared_lib_headers: [
         "libbinder",
         "libactivitymanager_aidl",
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index ffd38be..d9d8a3d 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -47,14 +47,14 @@
 CameraFlashlight::~CameraFlashlight() {
 }
 
-status_t CameraFlashlight::createFlashlightControl(const String8& cameraId) {
+status_t CameraFlashlight::createFlashlightControl(const std::string& cameraId) {
     ALOGV("%s: creating a flash light control for camera %s", __FUNCTION__,
-            cameraId.string());
+            cameraId.c_str());
     if (mFlashControl != NULL) {
         return INVALID_OPERATION;
     }
 
-    if (mProviderManager->supportSetTorchMode(cameraId.string())) {
+    if (mProviderManager->supportSetTorchMode(cameraId)) {
         mFlashControl = new ProviderFlashControl(mProviderManager);
     } else {
         ALOGE("Flashlight control not supported by this device!");
@@ -64,7 +64,7 @@
     return OK;
 }
 
-status_t CameraFlashlight::setTorchMode(const String8& cameraId, bool enabled) {
+status_t CameraFlashlight::setTorchMode(const std::string& cameraId, bool enabled) {
     if (!mFlashlightMapInitialized) {
         ALOGE("%s: findFlashUnits() must be called before this method.",
                __FUNCTION__);
@@ -72,7 +72,7 @@
     }
 
     ALOGV("%s: set torch mode of camera %s to %d", __FUNCTION__,
-            cameraId.string(), enabled);
+            cameraId.c_str(), enabled);
 
     status_t res = OK;
     Mutex::Autolock l(mLock);
@@ -87,7 +87,7 @@
         // TODO: Move torch status checks and state updates behind this CameraFlashlight lock
         // to avoid other similar race conditions.
         ALOGE("%s: Camera device %s is in use, cannot set torch mode.",
-                __FUNCTION__, cameraId.string());
+                __FUNCTION__, cameraId.c_str());
         return -EBUSY;
     }
 
@@ -117,7 +117,7 @@
     return res;
 }
 
-status_t CameraFlashlight::turnOnTorchWithStrengthLevel(const String8& cameraId,
+status_t CameraFlashlight::turnOnTorchWithStrengthLevel(const std::string& cameraId,
             int32_t torchStrength) {
     if (!mFlashlightMapInitialized) {
         ALOGE("%s: findFlashUnits() must be called before this method.",
@@ -126,13 +126,13 @@
     }
 
     ALOGV("%s: set torch strength of camera %s to %d", __FUNCTION__,
-            cameraId.string(), torchStrength);
+            cameraId.c_str(), torchStrength);
     status_t res = OK;
     Mutex::Autolock l(mLock);
 
     if (mOpenedCameraIds.indexOf(cameraId) != NAME_NOT_FOUND) {
         ALOGE("%s: Camera device %s is in use, cannot be turned ON.",
-                __FUNCTION__, cameraId.string());
+                __FUNCTION__, cameraId.c_str());
         return -EBUSY;
     }
 
@@ -148,7 +148,7 @@
 }
 
 
-status_t CameraFlashlight::getTorchStrengthLevel(const String8& cameraId,
+status_t CameraFlashlight::getTorchStrengthLevel(const std::string& cameraId,
             int32_t* torchStrength) {
     status_t res = OK;
     if (!mFlashlightMapInitialized) {
@@ -174,13 +174,13 @@
     Mutex::Autolock l(mLock);
     status_t res;
 
-    std::vector<String8> cameraIds;
+    std::vector<std::string> cameraIds;
     std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
     int numberOfCameras = static_cast<int>(ids.size());
     cameraIds.resize(numberOfCameras);
     // No module, must be provider
     for (size_t i = 0; i < cameraIds.size(); i++) {
-        cameraIds[i] = String8(ids[i].c_str());
+        cameraIds[i] = ids[i];
     }
 
     mFlashControl.clear();
@@ -195,17 +195,17 @@
         res = createFlashlightControl(id);
         if (res) {
             ALOGE("%s: failed to create flash control for %s", __FUNCTION__,
-                    id.string());
+                    id.c_str());
         } else {
             res = mFlashControl->hasFlashUnit(id, &hasFlash);
             if (res == -EUSERS || res == -EBUSY) {
                 ALOGE("%s: failed to check if camera %s has a flash unit. Some "
                         "camera devices may be opened", __FUNCTION__,
-                        id.string());
+                        id.c_str());
                 return res;
             } else if (res) {
                 ALOGE("%s: failed to check if camera %s has a flash unit. %s"
-                        " (%d)", __FUNCTION__, id.string(), strerror(-res),
+                        " (%d)", __FUNCTION__, id.c_str(), strerror(-res),
                         res);
             }
 
@@ -218,12 +218,12 @@
     return OK;
 }
 
-bool CameraFlashlight::hasFlashUnit(const String8& cameraId) {
+bool CameraFlashlight::hasFlashUnit(const std::string& cameraId) {
     Mutex::Autolock l(mLock);
     return hasFlashUnitLocked(cameraId);
 }
 
-bool CameraFlashlight::hasFlashUnitLocked(const String8& cameraId) {
+bool CameraFlashlight::hasFlashUnitLocked(const std::string& cameraId) {
     if (!mFlashlightMapInitialized) {
         ALOGE("%s: findFlashUnits() must be called before this method.",
                __FUNCTION__);
@@ -234,23 +234,23 @@
     if (index == NAME_NOT_FOUND) {
         // Might be external camera
         ALOGW("%s: camera %s not present when findFlashUnits() was called",
-                __FUNCTION__, cameraId.string());
+                __FUNCTION__, cameraId.c_str());
         return false;
     }
 
     return mHasFlashlightMap.valueAt(index);
 }
 
-bool CameraFlashlight::isBackwardCompatibleMode(const String8& cameraId) {
+bool CameraFlashlight::isBackwardCompatibleMode(const std::string& cameraId) {
     bool backwardCompatibleMode = false;
     if (mProviderManager != nullptr &&
-            !mProviderManager->supportSetTorchMode(cameraId.string())) {
+            !mProviderManager->supportSetTorchMode(cameraId)) {
         backwardCompatibleMode = true;
     }
     return backwardCompatibleMode;
 }
 
-status_t CameraFlashlight::prepareDeviceOpen(const String8& cameraId) {
+status_t CameraFlashlight::prepareDeviceOpen(const std::string& cameraId) {
     ALOGV("%s: prepare for device open", __FUNCTION__);
 
     Mutex::Autolock l(mLock);
@@ -270,10 +270,9 @@
             std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
             int numCameras = static_cast<int>(ids.size());
             for (int i = 0; i < numCameras; i++) {
-                String8 id8(ids[i].c_str());
-                if (hasFlashUnitLocked(id8)) {
+                if (hasFlashUnitLocked(ids[i])) {
                     mCallbacks->onTorchStatusChanged(
-                            id8, TorchModeStatus::NOT_AVAILABLE);
+                            ids[i], TorchModeStatus::NOT_AVAILABLE);
                 }
             }
         }
@@ -289,8 +288,8 @@
     return OK;
 }
 
-status_t CameraFlashlight::deviceClosed(const String8& cameraId) {
-    ALOGV("%s: device %s is closed", __FUNCTION__, cameraId.string());
+status_t CameraFlashlight::deviceClosed(const std::string& cameraId) {
+    ALOGV("%s: device %s is closed", __FUNCTION__, cameraId.c_str());
 
     Mutex::Autolock l(mLock);
     if (!mFlashlightMapInitialized) {
@@ -302,7 +301,7 @@
     ssize_t index = mOpenedCameraIds.indexOf(cameraId);
     if (index == NAME_NOT_FOUND) {
         ALOGE("%s: couldn't find camera %s in the opened list", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
     } else {
         mOpenedCameraIds.removeAt(index);
     }
@@ -316,10 +315,9 @@
         std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
         int numCameras = static_cast<int>(ids.size());
         for (int i = 0; i < numCameras; i++) {
-            String8 id8(ids[i].c_str());
-            if (hasFlashUnitLocked(id8)) {
+            if (hasFlashUnitLocked(ids[i])) {
                 mCallbacks->onTorchStatusChanged(
-                        id8, TorchModeStatus::AVAILABLE_OFF);
+                        ids[i], TorchModeStatus::AVAILABLE_OFF);
             }
         }
     }
@@ -343,35 +341,35 @@
 ProviderFlashControl::~ProviderFlashControl() {
 }
 
-status_t ProviderFlashControl::hasFlashUnit(const String8& cameraId, bool *hasFlash) {
+status_t ProviderFlashControl::hasFlashUnit(const std::string& cameraId, bool *hasFlash) {
     if (!hasFlash) {
         return BAD_VALUE;
     }
-    *hasFlash = mProviderManager->hasFlashUnit(cameraId.string());
+    *hasFlash = mProviderManager->hasFlashUnit(cameraId);
     return OK;
 }
 
-status_t ProviderFlashControl::setTorchMode(const String8& cameraId, bool enabled) {
+status_t ProviderFlashControl::setTorchMode(const std::string& cameraId, bool enabled) {
     ALOGV("%s: set camera %s torch mode to %d", __FUNCTION__,
-            cameraId.string(), enabled);
+            cameraId.c_str(), enabled);
 
-    return mProviderManager->setTorchMode(cameraId.string(), enabled);
+    return mProviderManager->setTorchMode(cameraId, enabled);
 }
 
-status_t ProviderFlashControl::turnOnTorchWithStrengthLevel(const String8& cameraId,
+status_t ProviderFlashControl::turnOnTorchWithStrengthLevel(const std::string& cameraId,
             int32_t torchStrength) {
     ALOGV("%s: change torch strength level of camera %s to %d", __FUNCTION__,
-            cameraId.string(), torchStrength);
+            cameraId.c_str(), torchStrength);
 
-    return mProviderManager->turnOnTorchWithStrengthLevel(cameraId.string(), torchStrength);
+    return mProviderManager->turnOnTorchWithStrengthLevel(cameraId, torchStrength);
 }
 
-status_t ProviderFlashControl::getTorchStrengthLevel(const String8& cameraId,
+status_t ProviderFlashControl::getTorchStrengthLevel(const std::string& cameraId,
             int32_t* torchStrength) {
     ALOGV("%s: get torch strength level of camera %s", __FUNCTION__,
-            cameraId.string());
+            cameraId.c_str());
 
-    return mProviderManager->getTorchStrengthLevel(cameraId.string(), torchStrength);
+    return mProviderManager->getTorchStrengthLevel(cameraId, torchStrength);
 }
 // ProviderFlashControl implementation ends
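A minimal sketch of the call-site pattern after the String8 to std::string migration above; the function name and log tag are illustrative, not part of the patch:

    // Hypothetical example, not part of this change.
    #define LOG_TAG "CameraFlashlightExample"
    #include <string>
    #include <utils/Log.h>

    // std::string::c_str() replaces String8::string() at the logging call sites.
    static void logTorchRequest(const std::string& cameraId, bool enabled) {
        ALOGI("%s: set torch mode of camera %s to %d", __FUNCTION__,
                cameraId.c_str(), enabled);
    }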
 
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 1703ddc..733c928 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
 #define ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
 
+#include <string>
 #include <gui/GLConsumer.h>
 #include <gui/Surface.h>
 #include <utils/KeyedVector.h>
@@ -38,20 +39,21 @@
         // cause the torch mode to be turned off in HAL v1 devices. If
         // previously-on torch mode is turned off,
         // callbacks.torch_mode_status_change() should be invoked.
-        virtual status_t hasFlashUnit(const String8& cameraId,
+        virtual status_t hasFlashUnit(const std::string& cameraId,
                     bool *hasFlash) = 0;
 
         // set the torch mode to on or off.
-        virtual status_t setTorchMode(const String8& cameraId,
+        virtual status_t setTorchMode(const std::string& cameraId,
                     bool enabled) = 0;
 
         // Change the brightness level of the torch. If the torch is OFF and
         // torchStrength >= 1, then the torch will also be turned ON.
-        virtual status_t turnOnTorchWithStrengthLevel(const String8& cameraId,
+        virtual status_t turnOnTorchWithStrengthLevel(const std::string& cameraId,
                     int32_t torchStrength) = 0;
 
         // Returns the torch strength level.
-        virtual status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength) = 0;
+        virtual status_t getTorchStrengthLevel(const std::string& cameraId,
+                int32_t* torchStrength) = 0;
 };
 
 /**
@@ -70,49 +72,49 @@
 
         // Whether a camera device has a flash unit. Before findFlashUnits() is
         // called, this function always returns false.
-        bool hasFlashUnit(const String8& cameraId);
+        bool hasFlashUnit(const std::string& cameraId);
 
         // set the torch mode to on or off.
-        status_t setTorchMode(const String8& cameraId, bool enabled);
+        status_t setTorchMode(const std::string& cameraId, bool enabled);
 
         // Change the torch strength level of the flash unit in torch mode.
-        status_t turnOnTorchWithStrengthLevel(const String8& cameraId, int32_t torchStrength);
+        status_t turnOnTorchWithStrengthLevel(const std::string& cameraId, int32_t torchStrength);
 
         // Get the torch strength level
-        status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength);
+        status_t getTorchStrengthLevel(const std::string& cameraId, int32_t* torchStrength);
 
         // Notify CameraFlashlight that camera service is going to open a camera
         // device. CameraFlashlight will free the resources that may cause the
         // camera open to fail. Camera service must call this function before
         // opening a camera device.
-        status_t prepareDeviceOpen(const String8& cameraId);
+        status_t prepareDeviceOpen(const std::string& cameraId);
 
         // Notify CameraFlashlight that camera service has closed a camera
         // device. CameraFlashlight may invoke callbacks for torch mode
         // available depending on the implementation.
-        status_t deviceClosed(const String8& cameraId);
+        status_t deviceClosed(const std::string& cameraId);
 
     private:
         // create flashlight control based on camera module API and camera
         // device API versions.
-        status_t createFlashlightControl(const String8& cameraId);
+        status_t createFlashlightControl(const std::string& cameraId);
 
         // mLock should be locked.
-        bool hasFlashUnitLocked(const String8& cameraId);
+        bool hasFlashUnitLocked(const std::string& cameraId);
 
         // Check if flash control is in backward compatible mode (simulated torch API by
         // opening cameras)
-        bool isBackwardCompatibleMode(const String8& cameraId);
+        bool isBackwardCompatibleMode(const std::string& cameraId);
 
         sp<FlashControlBase> mFlashControl;
 
         sp<CameraProviderManager> mProviderManager;
 
         CameraProviderManager::StatusListener* mCallbacks;
-        SortedVector<String8> mOpenedCameraIds;
+        SortedVector<std::string> mOpenedCameraIds;
 
         // camera id -> if it has a flash unit
-        KeyedVector<String8, bool> mHasFlashlightMap;
+        KeyedVector<std::string, bool> mHasFlashlightMap;
         bool mFlashlightMapInitialized;
 
         Mutex mLock; // protect CameraFlashlight API
@@ -127,10 +129,10 @@
         virtual ~ProviderFlashControl();
 
         // FlashControlBase
-        status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
-        status_t setTorchMode(const String8& cameraId, bool enabled);
-        status_t turnOnTorchWithStrengthLevel(const String8& cameraId, int32_t torchStrength);
-        status_t getTorchStrengthLevel(const String8& cameraId, int32_t* torchStrength);
+        status_t hasFlashUnit(const std::string& cameraId, bool *hasFlash);
+        status_t setTorchMode(const std::string& cameraId, bool enabled);
+        status_t turnOnTorchWithStrengthLevel(const std::string& cameraId, int32_t torchStrength);
+        status_t getTorchStrengthLevel(const std::string& cameraId, int32_t* torchStrength);
 
     private:
         sp<CameraProviderManager> mProviderManager;
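A small sketch of the std::string-keyed utility container the header now declares (KeyedVector<std::string, bool> replacing the String8-keyed flashlight map); the lookup helper below is illustrative, not code from the patch:

    // Hypothetical illustration, not part of this change.
    #include <string>
    #include <utils/KeyedVector.h>

    static bool lookupHasFlash(const android::KeyedVector<std::string, bool>& hasFlashMap,
            const std::string& cameraId) {
        // indexOfKey() returns a negative value when the camera id is not present.
        const ssize_t index = hasFlashMap.indexOfKey(cameraId);
        return (index >= 0) ? hasFlashMap.valueAt(index) : false;
    }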
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 1b1662b..ed321b6 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -24,6 +24,8 @@
 #include <cstdlib>
 #include <cstring>
 #include <ctime>
+#include <iostream>
+#include <sstream>
 #include <string>
 #include <sys/types.h>
 #include <inttypes.h>
@@ -36,7 +38,6 @@
 #include <aidl/AidlCameraService.h>
 #include <android-base/macros.h>
 #include <android-base/parseint.h>
-#include <android-base/stringprintf.h>
 #include <android/permission/PermissionChecker.h>
 #include <binder/ActivityManager.h>
 #include <binder/AppOpsManager.h>
@@ -71,6 +72,7 @@
 #include <system/camera_metadata.h>
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
+#include <camera/StringUtils.h>
 
 #include <system/camera.h>
 
@@ -87,11 +89,11 @@
     const char* kActivityServiceName = "activity";
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
+    const char* kProcessInfoServiceName = "processinfo";
 }; // namespace anonymous
 
 namespace android {
 
-using base::StringPrintf;
 using binder::Status;
 using namespace camera3;
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
@@ -116,28 +118,17 @@
     android_atomic_write(level, &gLogLevel);
 }
 
-// Convenience methods for constructing binder::Status objects for error returns
-
-#define STATUS_ERROR(errorCode, errorString) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
-
-#define STATUS_ERROR_FMT(errorCode, errorString, ...) \
-    binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
-                    __VA_ARGS__))
-
 // ----------------------------------------------------------------------------
 
-static const String16 sDumpPermission("android.permission.DUMP");
-static const String16 sManageCameraPermission("android.permission.MANAGE_CAMERA");
-static const String16 sCameraPermission("android.permission.CAMERA");
-static const String16 sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
-static const String16
+static const std::string sDumpPermission("android.permission.DUMP");
+static const std::string sManageCameraPermission("android.permission.MANAGE_CAMERA");
+static const std::string sCameraPermission("android.permission.CAMERA");
+static const std::string sSystemCameraPermission("android.permission.SYSTEM_CAMERA");
+static const std::string
         sCameraSendSystemEventsPermission("android.permission.CAMERA_SEND_SYSTEM_EVENTS");
-static const String16 sCameraOpenCloseListenerPermission(
+static const std::string sCameraOpenCloseListenerPermission(
         "android.permission.CAMERA_OPEN_CLOSE_LISTENER");
-static const String16
+static const std::string
         sCameraInjectExternalCameraPermission("android.permission.CAMERA_INJECT_EXTERNAL_CAMERA");
 // Constant integer for FGS Logging, used to denote the API type for logger
 static const int LOG_FGS_CAMERA_API = 1;
@@ -145,12 +136,13 @@
 static constexpr int32_t kSystemNativeClientScore = resource_policy::PERCEPTIBLE_APP_ADJ;
 static constexpr int32_t kSystemNativeClientState =
         ActivityManager::PROCESS_STATE_PERSISTENT_UI;
+static const std::string kServiceName("cameraserver");
 
-const String8 CameraService::kOfflineDevice("offline-");
-const String16 CameraService::kWatchAllClientsFlag("all");
+const std::string CameraService::kOfflineDevice("offline-");
+const std::string CameraService::kWatchAllClientsFlag("all");
 
 // Set to keep track of logged service error events.
-static std::set<String8> sServiceErrorEventSet;
+static std::set<std::string> sServiceErrorEventSet;
 
 CameraService::CameraService(
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
@@ -181,7 +173,7 @@
 }
 
 void CameraService::onServiceRegistration(const String16& name, const sp<IBinder>&) {
-    if (name != String16(kAppopsServiceName)) {
+    if (name != toString16(kAppopsServiceName)) {
         return;
     }
 
@@ -219,9 +211,9 @@
     // boot availability for cameraservice, use checkService which is
     // non blocking and register for notifications
     sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->checkService(String16(kAppopsServiceName));
+    sp<IBinder> binder = sm->checkService(toString16(kAppopsServiceName));
     if (!binder) {
-        sm->registerForNotifications(String16(kAppopsServiceName), this);
+        sm->registerForNotifications(toString16(kAppopsServiceName), this);
     } else {
         mAppOps.setCameraAudioRestriction(mAudioRestriction);
     }
@@ -257,8 +249,8 @@
             if (res != OK) {
                 ALOGE("%s: Unable to initialize camera provider manager: %s (%d)",
                         __FUNCTION__, strerror(-res), res);
-                logServiceError(String8::format("Unable to initialize camera provider manager"),
-                ERROR_DISCONNECTED);
+                logServiceError("Unable to initialize camera provider manager",
+                        ERROR_DISCONNECTED);
                 return res;
             }
         }
@@ -283,14 +275,12 @@
 
 
     for (auto& cameraId : deviceIds) {
-        String8 id8 = String8(cameraId.c_str());
-        if (getCameraState(id8) == nullptr) {
-            onDeviceStatusChanged(id8, CameraDeviceStatus::PRESENT);
+        if (getCameraState(cameraId) == nullptr) {
+            onDeviceStatusChanged(cameraId, CameraDeviceStatus::PRESENT);
         }
         if (unavailPhysicalIds.count(cameraId) > 0) {
             for (const auto& physicalId : unavailPhysicalIds[cameraId]) {
-                String8 physicalId8 = String8(physicalId.c_str());
-                onDeviceStatusChanged(id8, physicalId8, CameraDeviceStatus::NOT_PRESENT);
+                onDeviceStatusChanged(cameraId, physicalId, CameraDeviceStatus::NOT_PRESENT);
             }
         }
     }
@@ -310,7 +300,7 @@
     return OK;
 }
 
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+void CameraService::broadcastTorchModeStatus(const std::string& cameraId, TorchModeStatus status,
         SystemCameraKind systemCameraKind) {
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
@@ -321,16 +311,16 @@
             continue;
         }
         auto ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                String16{cameraId});
+                cameraId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
         // Also trigger the torch callbacks for cameras that were remapped to the current cameraId
         // for the specific package that this listener belongs to.
-        std::vector<String8> remappedCameraIds =
+        std::vector<std::string> remappedCameraIds =
                 findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
         for (auto& remappedCameraId : remappedCameraIds) {
             ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                    String16(remappedCameraId));
+                    remappedCameraId);
             i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                     __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
         }
@@ -353,7 +343,7 @@
     mNormalDeviceIdsWithoutSystemCamera.clear();
     for (auto &deviceId : normalDeviceIds) {
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-        if (getSystemCameraKind(String8(deviceId.c_str()), &deviceKind) != OK) {
+        if (getSystemCameraKind(deviceId, &deviceKind) != OK) {
             ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, deviceId.c_str());
             continue;
         }
@@ -368,14 +358,15 @@
               mNormalDeviceIdsWithoutSystemCamera.size());
 }
 
-status_t CameraService::getSystemCameraKind(const String8& cameraId, SystemCameraKind *kind) const {
+status_t CameraService::getSystemCameraKind(const std::string& cameraId,
+        SystemCameraKind *kind) const {
     auto state = getCameraState(cameraId);
     if (state != nullptr) {
         *kind = state->getSystemCameraKind();
         return OK;
     }
     // Hidden physical camera ids won't have CameraState
-    return mCameraProviderManager->getSystemCameraKind(cameraId.c_str(), kind);
+    return mCameraProviderManager->getSystemCameraKind(cameraId, kind);
 }
 
 void CameraService::updateCameraNumAndIds() {
@@ -398,9 +389,8 @@
     for (const auto& cameraId : mNormalDeviceIdsWithoutSystemCamera) {
         int facing = -1;
         int orientation = 0;
-        String8 cameraId8(cameraId.c_str());
         int portraitRotation;
-        getDeviceVersion(cameraId8, /*overrideToPortrait*/false, /*out*/&portraitRotation,
+        getDeviceVersion(cameraId, /*overrideToPortrait*/false, /*out*/&portraitRotation,
                 /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
@@ -432,8 +422,7 @@
     }
 }
 
-void CameraService::addStates(const String8 id) {
-    std::string cameraId(id.c_str());
+void CameraService::addStates(const std::string& cameraId) {
     CameraResourceCost cost;
     status_t res = mCameraProviderManager->getResourceCost(cameraId, &cost);
     if (res != OK) {
@@ -448,61 +437,61 @@
     }
     std::vector<std::string> physicalCameraIds;
     mCameraProviderManager->isLogicalCamera(cameraId, &physicalCameraIds);
-    std::set<String8> conflicting;
+    std::set<std::string> conflicting;
     for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
-        conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
+        conflicting.emplace(cost.conflictingDevices[i]);
     }
 
     {
         Mutex::Autolock lock(mCameraStatesLock);
-        mCameraStates.emplace(id, std::make_shared<CameraState>(id, cost.resourceCost,
+        mCameraStates.emplace(cameraId, std::make_shared<CameraState>(cameraId, cost.resourceCost,
                 conflicting, deviceKind, physicalCameraIds));
     }
 
-    if (mFlashlight->hasFlashUnit(id)) {
+    if (mFlashlight->hasFlashUnit(cameraId)) {
         Mutex::Autolock al(mTorchStatusMutex);
-        mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
+        mTorchStatusMap.add(cameraId, TorchModeStatus::AVAILABLE_OFF);
 
-        broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
+        broadcastTorchModeStatus(cameraId, TorchModeStatus::AVAILABLE_OFF, deviceKind);
     }
 
     updateCameraNumAndIds();
-    logDeviceAdded(id, "Device added");
+    logDeviceAdded(cameraId, "Device added");
 }
 
-void CameraService::removeStates(const String8 id) {
+void CameraService::removeStates(const std::string& cameraId) {
     updateCameraNumAndIds();
-    if (mFlashlight->hasFlashUnit(id)) {
+    if (mFlashlight->hasFlashUnit(cameraId)) {
         Mutex::Autolock al(mTorchStatusMutex);
-        mTorchStatusMap.removeItem(id);
+        mTorchStatusMap.removeItem(cameraId);
     }
 
     {
         Mutex::Autolock lock(mCameraStatesLock);
-        mCameraStates.erase(id);
+        mCameraStates.erase(cameraId);
     }
 }
 
-void CameraService::onDeviceStatusChanged(const String8& id,
+void CameraService::onDeviceStatusChanged(const std::string& cameraId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, newStatus=%d", __FUNCTION__,
-            id.string(), newHalStatus);
+            cameraId.c_str(), newHalStatus);
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
-    std::shared_ptr<CameraState> state = getCameraState(id);
+    std::shared_ptr<CameraState> state = getCameraState(cameraId);
 
     if (state == nullptr) {
         if (newStatus == StatusInternal::PRESENT) {
             ALOGI("%s: Unknown camera ID %s, a new camera is added",
-                    __FUNCTION__, id.string());
+                    __FUNCTION__, cameraId.c_str());
 
             // First add as absent to make sure clients are notified below
-            addStates(id);
+            addStates(cameraId);
 
-            updateStatus(newStatus, id);
+            updateStatus(newStatus, cameraId);
         } else {
-            ALOGE("%s: Bad camera ID %s", __FUNCTION__, id.string());
+            ALOGE("%s: Bad camera ID %s", __FUNCTION__, cameraId.c_str());
         }
         return;
     }
@@ -515,12 +504,12 @@
     }
 
     if (newStatus == StatusInternal::NOT_PRESENT) {
-        logDeviceRemoved(id, String8::format("Device status changed from %d to %d", oldStatus,
+        logDeviceRemoved(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
                 newStatus));
 
         // Set the device status to NOT_PRESENT, clients will no longer be able to connect
         // to this device until the status changes
-        updateStatus(StatusInternal::NOT_PRESENT, id);
+        updateStatus(StatusInternal::NOT_PRESENT, cameraId);
 
         sp<BasicClient> clientToDisconnectOnline, clientToDisconnectOffline;
         {
@@ -532,28 +521,28 @@
 
             // Remove online as well as offline client from the list of active clients,
             // if they are present
-            clientToDisconnectOnline = removeClientLocked(id);
-            clientToDisconnectOffline = removeClientLocked(kOfflineDevice + id);
+            clientToDisconnectOnline = removeClientLocked(cameraId);
+            clientToDisconnectOffline = removeClientLocked(kOfflineDevice + cameraId);
         }
 
-        disconnectClient(id, clientToDisconnectOnline);
-        disconnectClient(kOfflineDevice + id, clientToDisconnectOffline);
+        disconnectClient(cameraId, clientToDisconnectOnline);
+        disconnectClient(kOfflineDevice + cameraId, clientToDisconnectOffline);
 
-        removeStates(id);
+        removeStates(cameraId);
     } else {
         if (oldStatus == StatusInternal::NOT_PRESENT) {
-            logDeviceAdded(id, String8::format("Device status changed from %d to %d", oldStatus,
+            logDeviceAdded(cameraId, fmt::sprintf("Device status changed from %d to %d", oldStatus,
                     newStatus));
         }
-        updateStatus(newStatus, id);
+        updateStatus(newStatus, cameraId);
     }
 }
 
-void CameraService::onDeviceStatusChanged(const String8& id,
-        const String8& physicalId,
+void CameraService::onDeviceStatusChanged(const std::string& id,
+        const std::string& physicalId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
-            __FUNCTION__, id.string(), physicalId.string(), newHalStatus);
+            __FUNCTION__, id.c_str(), physicalId.c_str(), newHalStatus);
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -561,7 +550,7 @@
 
     if (state == nullptr) {
         ALOGE("%s: Physical camera id %s status change on a non-present ID %s",
-                __FUNCTION__, physicalId.string(), id.string());
+                __FUNCTION__, physicalId.c_str(), id.c_str());
         return;
     }
 
@@ -569,7 +558,7 @@
     if (logicalCameraStatus != StatusInternal::PRESENT &&
             logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalId.string(), newHalStatus, logicalCameraStatus);
+                __FUNCTION__, physicalId.c_str(), newHalStatus, logicalCameraStatus);
         return;
     }
 
@@ -581,21 +570,18 @@
     }
 
     if (updated) {
-        String8 idCombo = id + " : " + physicalId;
+        std::string idCombo = id + " : " + physicalId;
         if (newStatus == StatusInternal::PRESENT) {
-            logDeviceAdded(idCombo,
-                    String8::format("Device status changed to %d", newStatus));
+            logDeviceAdded(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
         } else {
-            logDeviceRemoved(idCombo,
-                    String8::format("Device status changed to %d", newStatus));
+            logDeviceRemoved(idCombo, fmt::sprintf("Device status changed to %d", newStatus));
         }
         // Avoid calling getSystemCameraKind() with mStatusListenerLock held (b/141756275)
         SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
         if (getSystemCameraKind(id, &deviceKind) != OK) {
-            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, id.string());
+            ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, id.c_str());
             return;
         }
-        String16 id16(id), physicalId16(physicalId);
         Mutex::Autolock lock(mStatusListenerLock);
         for (auto& listener : mListenerList) {
             if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
@@ -605,7 +591,7 @@
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(
-                    mapToInterface(newStatus), id16, physicalId16);
+                    mapToInterface(newStatus), id, physicalId);
             listener->handleBinderStatus(ret,
                     "%s: Failed to trigger onPhysicalCameraStatusChanged for %d:%d: %d",
                     __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -614,10 +600,10 @@
     }
 }
 
-void CameraService::disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect) {
+void CameraService::disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect) {
     if (clientToDisconnect.get() != nullptr) {
         ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
-                __FUNCTION__, id.string());
+                __FUNCTION__, id.c_str());
         // Notify the client of disconnection
         clientToDisconnect->notifyError(
                 hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
@@ -626,13 +612,13 @@
     }
 }
 
-void CameraService::onTorchStatusChanged(const String8& cameraId,
+void CameraService::onTorchStatusChanged(const std::string& cameraId,
         TorchModeStatus newStatus) {
     SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
     status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
     if (res != OK) {
         ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
         return;
     }
     Mutex::Autolock al(mTorchStatusMutex);
@@ -640,34 +626,33 @@
 }
 
 
-void CameraService::onTorchStatusChanged(const String8& cameraId,
+void CameraService::onTorchStatusChanged(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     Mutex::Autolock al(mTorchStatusMutex);
     onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
 }
 
-void CameraService::broadcastTorchStrengthLevel(const String8& cameraId,
+void CameraService::broadcastTorchStrengthLevel(const std::string& cameraId,
         int32_t newStrengthLevel) {
     Mutex::Autolock lock(mStatusListenerLock);
     for (auto& i : mListenerList) {
-        auto ret = i->getListener()->onTorchStrengthLevelChanged(String16{cameraId},
-                newStrengthLevel);
+        auto ret = i->getListener()->onTorchStrengthLevelChanged(cameraId, newStrengthLevel);
         i->handleBinderStatus(ret,
                 "%s: Failed to trigger onTorchStrengthLevelChanged for %d:%d: %d", __FUNCTION__,
                 i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
     }
 }
 
-void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
+void CameraService::onTorchStatusChangedLocked(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
-            __FUNCTION__, cameraId.string(), newStatus);
+            __FUNCTION__, cameraId.c_str(), newStatus);
 
     TorchModeStatus status;
     status_t res = getTorchStatusLocked(cameraId, &status);
     if (res) {
         ALOGE("%s: cannot get torch status of camera %s: %s (%d)",
-                __FUNCTION__, cameraId.string(), strerror(-res), res);
+                __FUNCTION__, cameraId.c_str(), strerror(-res), res);
         return;
     }
     if (status == newStatus) {
@@ -692,18 +677,18 @@
             if (oldUid != newUid) {
                 // If the UID has changed, log the status and update current UID in mTorchUidMap
                 if (status == TorchModeStatus::AVAILABLE_ON) {
-                    notifier.noteFlashlightOff(cameraId, oldUid);
+                    notifier.noteFlashlightOff(toString8(cameraId), oldUid);
                 }
                 if (newStatus == TorchModeStatus::AVAILABLE_ON) {
-                    notifier.noteFlashlightOn(cameraId, newUid);
+                    notifier.noteFlashlightOn(toString8(cameraId), newUid);
                 }
                 iter->second.second = newUid;
             } else {
                 // If the UID has not changed, log the status
                 if (newStatus == TorchModeStatus::AVAILABLE_ON) {
-                    notifier.noteFlashlightOn(cameraId, oldUid);
+                    notifier.noteFlashlightOn(toString8(cameraId), oldUid);
                 } else {
-                    notifier.noteFlashlightOff(cameraId, oldUid);
+                    notifier.noteFlashlightOff(toString8(cameraId), oldUid);
                 }
             }
         }
@@ -711,17 +696,100 @@
     broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
 }
 
-static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
+static bool isAutomotiveDevice() {
+    // Checks the property ro.hardware.type and returns true if it is
+    // automotive.
+    char value[PROPERTY_VALUE_MAX] = {0};
+    property_get("ro.hardware.type", value, "");
+    return strncmp(value, "automotive", PROPERTY_VALUE_MAX) == 0;
+}
+
+static bool isAutomotivePrivilegedClient(int32_t uid) {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns true if the uid is AID_AUTOMOTIVE_EVS, a privileged client uid used for
+    // safety-critical use cases such as rear view and surround view.
+    return uid == AID_AUTOMOTIVE_EVS;
+}
+
+bool CameraService::isAutomotiveExteriorSystemCamera(const std::string& cam_id) const {
+    // Returns false if this is not an automotive device type.
+    if (!isAutomotiveDevice())
+        return false;
+
+    // Returns false if no camera id is provided.
+    if (cam_id.empty())
+        return false;
+
+    SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+    if (getSystemCameraKind(cam_id, &systemCameraKind) != OK) {
+        // This isn't a known camera ID, so it's not a system camera.
+        ALOGE("%s: Unknown camera id %s, ", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    if (systemCameraKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) {
+        ALOGE("%s: camera id %s is not a system camera", __FUNCTION__, cam_id.c_str());
+        return false;
+    }
+
+    CameraMetadata cameraInfo;
+    status_t res = mCameraProviderManager->getCameraCharacteristics(
+            cam_id, false, &cameraInfo, false);
+    if (res != OK) {
+        ALOGE("%s: Not able to get camera characteristics for camera id %s", __FUNCTION__,
+                cam_id.c_str());
+        return false;
+    }
+
+    camera_metadata_entry auto_location = cameraInfo.find(ANDROID_AUTOMOTIVE_LOCATION);
+    if (auto_location.count != 1)
+        return false;
+
+    uint8_t location = auto_location.data.u8[0];
+    if ((location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_REAR) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT) &&
+            (location != ANDROID_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT)) {
+        return false;
+    }
+
+    return true;
+}
+
+bool CameraService::checkPermission(const std::string& cameraId, const std::string& permission,
+        const AttributionSourceState& attributionSource, const std::string& message,
+        int32_t attributedOpCode) const {
+    if (isAutomotivePrivilegedClient(attributionSource.uid)) {
+        // If cameraId is empty, this check is not tied to a specific camera, so grant the
+        // permission to the automotive privileged client based on its uid alone.
+        if (cameraId.empty())
+            return true;
+        // If this call is for accessing a specific camera, cameraId must be provided. In that
+        // case, only pre-grant the permission for exterior system-only cameras.
+        return isAutomotiveExteriorSystemCamera(cameraId);
+    }
+
     permission::PermissionChecker permissionChecker;
+    return permissionChecker.checkPermissionForPreflight(toString16(permission), attributionSource,
+            toString16(message), attributedOpCode)
+            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
+bool CameraService::hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid,
+        int callingUid) const {
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-            sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(cameraId,
+            sSystemCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
+    bool checkPermissionForCamera = checkPermission(cameraId,
+            sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
     return checkPermissionForSystemCamera && checkPermissionForCamera;
 }
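The new checkPermission and hasPermissionsForSystemCamera helpers above fold the automotive pre-grant and the PermissionChecker preflight into a single path. A minimal standalone sketch of that decision order follows (not part of the patch); kAutomotiveEvsUid and the stubbed helpers are assumptions standing in for AID_AUTOMOTIVE_EVS, the ro.hardware.type check, the ANDROID_AUTOMOTIVE_LOCATION lookup, and the real PermissionChecker call.

// Standalone sketch only (not part of the patch): approximates the decision order of the
// new CameraService::checkPermission helper. All names and values here are assumptions.
#include <cstdint>
#include <iostream>
#include <string>

constexpr int32_t kAutomotiveEvsUid = 1062;  // assumed numeric value of AID_AUTOMOTIVE_EVS

static bool isAutomotivePrivilegedClientSketch(int32_t uid) {
    return uid == kAutomotiveEvsUid;  // real code also requires ro.hardware.type == "automotive"
}

static bool isAutomotiveExteriorSystemCameraSketch(const std::string& cameraId) {
    return cameraId == "exterior-rear";  // stub for the SystemCameraKind + location check
}

static bool preflightAllowsSketch(const std::string& /*permission*/) {
    return false;  // stub for checkPermissionForPreflight() != PERMISSION_HARD_DENIED
}

static bool checkPermissionSketch(const std::string& cameraId, const std::string& permission,
        int32_t callingUid) {
    if (isAutomotivePrivilegedClientSketch(callingUid)) {
        // Empty cameraId: grant on uid alone; otherwise require an exterior system camera.
        return cameraId.empty() || isAutomotiveExteriorSystemCameraSketch(cameraId);
    }
    // All other callers go through the regular permission preflight.
    return preflightAllowsSketch(permission);
}

int main() {
    std::cout << checkPermissionSketch("exterior-rear", "android.permission.CAMERA",
            kAutomotiveEvsUid) << "\n";  // 1: pre-granted for the privileged EVS uid
    std::cout << checkPermissionSketch("0", "android.permission.CAMERA", 10001) << "\n";  // 0
    return 0;
}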
 
@@ -729,7 +797,7 @@
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
+            hasPermissionsForSystemCamera(std::string(), CameraThreadState::getCallingPid(),
                     CameraThreadState::getCallingUid());
     switch (type) {
         case CAMERA_TYPE_BACKWARD_COMPATIBLE:
@@ -755,9 +823,8 @@
     return Status::ok();
 }
 
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping&
-      cameraIdRemapping) {
-    if (!checkCallingPermission(sCameraInjectExternalCameraPermission)) {
+Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
         const int pid = CameraThreadState::getCallingPid();
         const int uid = CameraThreadState::getCallingUid();
         ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
@@ -777,34 +844,33 @@
 Status CameraService::parseCameraIdRemapping(
         const hardware::CameraIdRemapping& cameraIdRemapping,
         /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
-    String16 packageName;
-    String8 cameraIdToReplace, updatedCameraId;
+    std::string packageName;
+    std::string cameraIdToReplace, updatedCameraId;
     for(const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
         packageName = packageIdRemapping.packageName;
-        if (packageName == String16("")) {
+        if (packageName.empty()) {
             return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Package name cannot be empty");
         }
-
         if (packageIdRemapping.cameraIdsToReplace.size()
             != packageIdRemapping.updatedCameraIds.size()) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                     "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
-                     String8(packageName).c_str());
+                    packageName.c_str());
         }
         for(size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
-            cameraIdToReplace = String8(packageIdRemapping.cameraIdsToReplace[i]);
-            updatedCameraId = String8(packageIdRemapping.updatedCameraIds[i]);
-            if (cameraIdToReplace == String8("") || updatedCameraId == String8("")) {
+            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
+            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
+            if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
                 return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                         "CameraIdRemapping: Camera Id cannot be empty for package %s",
-                        String8(packageName).c_str());
+                        packageName.c_str());
             }
             if (cameraIdToReplace == updatedCameraId) {
                 return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
                         "CameraIdRemapping: CameraIdToReplace cannot be the same"
                         " as updatedCameraId for %s",
-                        String8(packageName).c_str());
+                        packageName.c_str());
             }
             (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
         }
@@ -822,10 +888,10 @@
     // remapped in cameraIdRemapping, but only if they were being used by an
     // affected packageName.
     std::vector<sp<BasicClient>> clientsToDisconnect;
-    std::vector<String8> cameraIdsToUpdate;
+    std::vector<std::string> cameraIdsToUpdate;
     for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
         for (auto& [id0, id1] : injectionMap) {
-            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, String8(packageName).c_str(),
+            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
                     id0.c_str(), id1.c_str());
             auto clientDescriptor = mActiveClientManager.get(id0);
             if (clientDescriptor != nullptr) {
@@ -860,10 +926,10 @@
     mServiceLock.lock();
 }
 
-std::vector<String8> CameraService::findOriginalIdsForRemappedCameraId(
-    const String8& inputCameraId, int clientUid) {
-    String16 packageName = getPackageNameFromUid(clientUid);
-    std::vector<String8> cameraIds;
+std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
+    const std::string& inputCameraId, int clientUid) {
+    std::string packageName = getPackageNameFromUid(clientUid);
+    std::vector<std::string> cameraIds;
     Mutex::Autolock lock(mCameraIdRemappingLock);
     if (auto packageMapIter = mCameraIdRemapping.find(packageName);
         packageMapIter != mCameraIdRemapping.end()) {
@@ -876,15 +942,15 @@
     return cameraIds;
 }
 
-String8 CameraService::resolveCameraId(const String8& inputCameraId) {
-  return resolveCameraId(inputCameraId, String16(""));
+std::string CameraService::resolveCameraId(const std::string& inputCameraId) {
+  return resolveCameraId(inputCameraId, "");
 }
 
-String8 CameraService::resolveCameraId(
-    const String8& inputCameraId,
-    const String16& packageName) {
-    String16 packageNameVal = packageName;
-    if (packageName == String16("")) {
+std::string CameraService::resolveCameraId(
+    const std::string& inputCameraId,
+    const std::string& packageName) {
+    std::string packageNameVal = packageName;
+    if (packageName == "") {
         int clientUid = CameraThreadState::getCallingUid();
         packageNameVal = getPackageNameFromUid(clientUid);
     }
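For context, the resolveCameraId overloads in the surrounding hunks consult the per-package remapping table filled in by parseCameraIdRemapping. A minimal standalone sketch of that lookup (not part of the patch), assuming a plain std::map models TCameraIdRemapping and using made-up package and camera ids:

// Standalone sketch only: the per-package camera id remapping lookup performed by
// resolveCameraId, with std::map standing in for TCameraIdRemapping.
#include <iostream>
#include <map>
#include <string>

using RemapTableSketch = std::map<std::string, std::map<std::string, std::string>>;

static std::string resolveCameraIdSketch(const RemapTableSketch& table,
        const std::string& packageName, const std::string& inputCameraId) {
    if (auto pkg = table.find(packageName); pkg != table.end()) {
        if (auto id = pkg->second.find(inputCameraId); id != pkg->second.end()) {
            return id->second;  // remapped id configured for this package
        }
    }
    return inputCameraId;  // no remapping: use the id as given
}

int main() {
    RemapTableSketch table{{"com.example.app", {{"0", "3"}}}};  // hypothetical remapping
    std::cout << resolveCameraIdSketch(table, "com.example.app", "0") << "\n";  // 3
    std::cout << resolveCameraIdSketch(table, "some.other.app", "0") << "\n";   // 0
    return 0;
}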
@@ -892,7 +958,7 @@
     if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
         packageMapIter != mCameraIdRemapping.end()) {
         ALOGI("%s: resolveCameraId: packageName found %s",
-                __FUNCTION__, String8(packageNameVal).c_str());
+                __FUNCTION__, packageNameVal.c_str());
         auto packageMap = packageMapIter->second;
         if (auto replacementIdIter = packageMap.find(inputCameraId);
             replacementIdIter != packageMap.end()) {
@@ -910,19 +976,18 @@
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
     std::string cameraIdStr = cameraIdIntToStrLocked(cameraId);
-    if (shouldRejectSystemCameraConnection(String8(cameraIdStr.c_str()))) {
+    if (shouldRejectSystemCameraConnection(cameraIdStr)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
                 "characteristics for system only device %s: ", cameraIdStr.c_str());
     }
 
     if (!mInitialized) {
-        logServiceError(String8::format("Camera subsystem is not available"),ERROR_DISCONNECTED);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
-    bool hasSystemCameraPermissions =
-            hasPermissionsForSystemCamera(CameraThreadState::getCallingPid(),
-                    CameraThreadState::getCallingUid());
+    bool hasSystemCameraPermissions = hasPermissionsForSystemCamera(std::to_string(cameraId),
+            CameraThreadState::getCallingPid(), CameraThreadState::getCallingUid());
     int cameraIdBound = mNumberOfCamerasWithoutSystemCamera;
     if (hasSystemCameraPermissions) {
         cameraIdBound = mNumberOfCameras;
@@ -935,13 +1000,13 @@
     Status ret = Status::ok();
     int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr.c_str(), overrideToPortrait, &portraitRotation, cameraInfo);
+            cameraIdStr, overrideToPortrait, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
                 strerror(-err), err);
-        logServiceError(String8::format("Error retrieving camera info from device %d",cameraId),
-            ERROR_INVALID_OPERATION);
+        logServiceError(std::string("Error retrieving camera info from device ")
+                + std::to_string(cameraId), ERROR_INVALID_OPERATION);
     }
 
     return ret;
@@ -951,13 +1016,12 @@
     const std::vector<std::string> *deviceIds = &mNormalDeviceIdsWithoutSystemCamera;
     auto callingPid = CameraThreadState::getCallingPid();
     auto callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForSystemCamera = permissionChecker.checkPermissionForPreflight(
-                sSystemCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForSystemCamera = checkPermission(std::to_string(cameraIdInt),
+                sSystemCameraPermission, attributionSource, std::string(),
+                AppOpsManager::OP_NONE);
     if (checkPermissionForSystemCamera || getpid() == callingPid) {
         deviceIds = &mNormalDeviceIds;
     }
@@ -970,15 +1034,17 @@
     return (*deviceIds)[cameraIdInt];
 }
 
-String8 CameraService::cameraIdIntToStr(int cameraIdInt) {
+std::string CameraService::cameraIdIntToStr(int cameraIdInt) {
     Mutex::Autolock lock(mServiceLock);
-    return String8(cameraIdIntToStrLocked(cameraIdInt).c_str());
+    return cameraIdIntToStrLocked(cameraIdInt);
 }
 
-Status CameraService::getCameraCharacteristics(const String16& unresolvedCameraId,
+Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
         int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
-    String8 cameraId = resolveCameraId(String8(unresolvedCameraId));
+
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
+
     if (!cameraInfo) {
         ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
@@ -986,42 +1052,41 @@
 
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
-        logServiceError(String8::format("Camera subsystem is not available"),ERROR_DISCONNECTED);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");;
     }
 
-    if (shouldRejectSystemCameraConnection(String8(cameraId))) {
+    if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera"
-                "characteristics for system only device %s: ", String8(cameraId).string());
+                "characteristics for system only device %s: ", cameraId.c_str());
     }
 
     Status ret{};
 
-    std::string cameraIdStr = cameraId.string();
     bool overrideForPerfClass =
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
-                    cameraIdStr, targetSdkVersion);
+                    cameraId, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraIdStr, overrideForPerfClass, cameraInfo, overrideToPortrait);
+            cameraId, overrideForPerfClass, cameraInfo, overrideToPortrait);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
-                    "characteristics for unknown device %s: %s (%d)", String8(cameraId).string(),
+                    "characteristics for unknown device %s: %s (%d)", cameraId.c_str(),
                     strerror(-res), res);
         } else {
-            logServiceError(String8::format("Unable to retrieve camera characteristics for "
-            "device %s.", String8(cameraId).string()),ERROR_INVALID_OPERATION);
+            logServiceError(fmt::sprintf("Unable to retrieve camera characteristics for device %s.",
+                    cameraId.c_str()), ERROR_INVALID_OPERATION);
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
-                    "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+                    "characteristics for device %s: %s (%d)", cameraId.c_str(),
                     strerror(-res), res);
         }
     }
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
-    if (getSystemCameraKind(String8(cameraId), &deviceKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, String8(cameraId).string());
+    if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
+        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera kind "
-                "for device %s", String8(cameraId).string());
+                "for device %s", cameraId.c_str());
     }
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
@@ -1029,24 +1094,22 @@
     // If it's not calling from cameraserver, check the permission only if
     // android.permission.CAMERA is required. If android.permission.SYSTEM_CAMERA was needed,
     // it would've already been checked in shouldRejectSystemCameraConnection.
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission,
+            attributionSource, std::string(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) &&
             (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) &&
             !checkPermissionForCamera) {
         res = cameraInfo->removePermissionEntries(
-                mCameraProviderManager->getProviderTagIdLocked(String8(cameraId).string()),
+                mCameraProviderManager->getProviderTagIdLocked(cameraId),
                 &tagsRemoved);
         if (res != OK) {
             cameraInfo->clear();
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to remove camera"
                     " characteristics needing camera permission for device %s: %s (%d)",
-                    String8(cameraId).string(), strerror(-res), res);
+                    cameraId.c_str(), strerror(-res), res);
         }
     }
 
@@ -1056,7 +1119,7 @@
         if (res != OK) {
             cameraInfo->clear();
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Failed to insert camera "
-                    "keys needing permission for device %s: %s (%d)", String8(cameraId).string(),
+                    "keys needing permission for device %s: %s (%d)", cameraId.c_str(),
                     strerror(-res), res);
         }
     }
@@ -1064,10 +1127,11 @@
     return ret;
 }
 
-Status CameraService::getTorchStrengthLevel(const String16& cameraId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
         int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized.", __FUNCTION__);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera HAL couldn't be initialized.");
@@ -1078,22 +1142,21 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Strength level should not be null.");
     }
 
-    status_t res = mCameraProviderManager->getTorchStrengthLevel(String8(cameraId).string(),
-        torchStrength);
+    status_t res = mCameraProviderManager->getTorchStrengthLevel(cameraId, torchStrength);
     if (res != OK) {
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve torch "
-            "strength level for device %s: %s (%d)", String8(cameraId).string(),
+            "strength level for device %s: %s (%d)", cameraId.c_str(),
             strerror(-res), res);
     }
     ALOGI("%s: Torch strength level is: %d", __FUNCTION__, *torchStrength);
     return Status::ok();
 }
 
-String8 CameraService::getFormattedCurrentTime() {
+std::string CameraService::getFormattedCurrentTime() {
     time_t now = time(nullptr);
     char formattedTime[64];
     strftime(formattedTime, sizeof(formattedTime), "%m-%d %H:%M:%S", localtime(&now));
-    return String8(formattedTime);
+    return std::string(formattedTime);
 }
 
 Status CameraService::getCameraVendorTagDescriptor(
@@ -1131,7 +1194,7 @@
     BasicClient::BasicClient::sCameraService = nullptr;
 }
 
-std::pair<int, IPCTransport> CameraService::getDeviceVersion(const String8& cameraId,
+std::pair<int, IPCTransport> CameraService::getDeviceVersion(const std::string& cameraId,
         bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
     ATRACE_CALL();
 
@@ -1140,18 +1203,17 @@
     status_t res;
     hardware::hidl_version maxVersion{0,0};
     IPCTransport transport = IPCTransport::INVALID;
-    res = mCameraProviderManager->getHighestSupportedVersion(cameraId.string(),
-            &maxVersion, &transport);
+    res = mCameraProviderManager->getHighestSupportedVersion(cameraId, &maxVersion, &transport);
     if (res != OK || transport == IPCTransport::INVALID) {
         ALOGE("%s: Unable to get highest supported version for camera id %s", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
         return std::make_pair(-1, IPCTransport::INVALID) ;
     }
     deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId.string(), overrideToPortrait,
+        res = mCameraProviderManager->getCameraInfo(cameraId, overrideToPortrait,
                 portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
@@ -1183,12 +1245,13 @@
 }
 
 Status CameraService::makeClient(const sp<CameraService>& cameraService,
-        const sp<IInterface>& cameraCb, const String16& packageName, bool systemNativeClient,
-        const std::optional<String16>& featureId,  const String8& cameraId,
+        const sp<IInterface>& cameraCb, const std::string& packageName, bool systemNativeClient,
+        const std::optional<std::string>& featureId,  const std::string& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
         apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
-        bool forceSlowJpegMode, const String8& originalCameraId, /*out*/sp<BasicClient>* client) {
+        bool forceSlowJpegMode, const std::string& originalCameraId,
+        /*out*/sp<BasicClient>* client) {
     // For HIDL devices
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         // Create CameraClient based on device version reported by the HAL.
@@ -1198,7 +1261,7 @@
                 ALOGE("Camera using old HAL version: %d", deviceVersion);
                 return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
                         "Camera device \"%s\" HAL version %d no longer supported",
-                        cameraId.string(), deviceVersion);
+                        cameraId.c_str(), deviceVersion);
                 break;
             case CAMERA_DEVICE_API_VERSION_3_0:
             case CAMERA_DEVICE_API_VERSION_3_1:
@@ -1214,13 +1277,14 @@
                 ALOGE("Unknown camera device HAL version: %d", deviceVersion);
                 return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                         "Camera device \"%s\" has unknown HAL version %d",
-                        cameraId.string(), deviceVersion);
+                        cameraId.c_str(), deviceVersion);
         }
     }
     if (effectiveApiLevel == API_1) { // Camera1 API route
         sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
-                packageName, featureId, cameraId, api1CameraId, facing, sensorOrientation,
+                packageName, featureId, cameraId,
+                api1CameraId, facing, sensorOrientation,
                 clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
                 forceSlowJpegMode);
         ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
@@ -1237,18 +1301,18 @@
     return Status::ok();
 }
 
-String8 CameraService::toString(std::set<userid_t> intSet) {
-    String8 s("");
+std::string CameraService::toString(std::set<userid_t> intSet) {
+    std::ostringstream s;
     bool first = true;
     for (userid_t i : intSet) {
         if (first) {
-            s.appendFormat("%d", i);
+            s << std::to_string(i);
             first = false;
         } else {
-            s.appendFormat(", %d", i);
+            s << ", " << std::to_string(i);
         }
     }
-    return s;
+    return s.str();
 }
 
 int32_t CameraService::mapToInterface(TorchModeStatus status) {
@@ -1314,16 +1378,15 @@
 Status CameraService::initializeShimMetadata(int cameraId) {
     int uid = CameraThreadState::getCallingUid();
 
-    String16 internalPackageName("cameraserver");
-    String8 id = String8::format("%d", cameraId);
+    std::string cameraIdStr = std::to_string(cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
     if (!(ret = connectHelper<ICameraClient,Client>(
-            sp<ICameraClient>{nullptr}, id, cameraId,
-            internalPackageName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
+            sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
+            kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, id, /*out*/ tmp)
+            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().string());
     }
@@ -1343,17 +1406,17 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
     }
 
-    String8 id = String8::format("%d", cameraId);
+    std::string cameraIdStr = std::to_string(cameraId);
 
     // Check if we already have parameters
     {
         // Scope for service lock
         Mutex::Autolock lock(mServiceLock);
-        auto cameraState = getCameraState(id);
+        auto cameraState = getCameraState(cameraIdStr);
         if (cameraState == nullptr) {
-            ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, id.string());
+            ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, cameraIdStr.c_str());
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Invalid camera ID: %s", id.string());
+                    "Invalid camera ID: %s", cameraIdStr.c_str());
         }
         CameraParameters p = cameraState->getShimParams();
         if (!p.isEmpty()) {
@@ -1374,11 +1437,11 @@
     {
         // Scope for service lock
         Mutex::Autolock lock(mServiceLock);
-        auto cameraState = getCameraState(id);
+        auto cameraState = getCameraState(cameraIdStr);
         if (cameraState == nullptr) {
-            ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, id.string());
+            ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, cameraIdStr.c_str());
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Invalid camera ID: %s", id.string());
+                    "Invalid camera ID: %s", cameraIdStr.c_str());
         }
         CameraParameters p = cameraState->getShimParams();
         if (!p.isEmpty()) {
@@ -1404,12 +1467,13 @@
     }
 }
 
-static status_t getUidForPackage(String16 packageName, int userId, /*inout*/uid_t& uid, int err) {
+static status_t getUidForPackage(const std::string &packageName, int userId, /*inout*/uid_t& uid,
+        int err) {
     PermissionController pc;
-    uid = pc.getPackageUid(packageName, 0);
+    uid = pc.getPackageUid(toString16(packageName), 0);
     if (uid <= 0) {
-        ALOGE("Unknown package: '%s'", String8(packageName).string());
-        dprintf(err, "Unknown package: '%s'\n", String8(packageName).string());
+        ALOGE("Unknown package: '%s'", packageName.c_str());
+        dprintf(err, "Unknown package: '%s'\n", packageName.c_str());
         return BAD_VALUE;
     }
 
@@ -1423,8 +1487,8 @@
     return NO_ERROR;
 }
 
-Status CameraService::validateConnectLocked(const String8& cameraId,
-        const String8& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
+Status CameraService::validateConnectLocked(const std::string& cameraId,
+        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
         /*out*/int& originalClientPid) const {
 
 #ifdef __BRILLO__
@@ -1446,14 +1510,14 @@
         ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)",
                 callingPid);
         return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
-                "No camera HAL module available to open camera device \"%s\"", cameraId.string());
+                "No camera HAL module available to open camera device \"%s\"", cameraId.c_str());
     }
 
     if (getCameraState(cameraId) == nullptr) {
         ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid,
-                cameraId.string());
+                cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
-                "No camera device with ID \"%s\" available", cameraId.string());
+                "No camera device with ID \"%s\" available", cameraId.c_str());
     }
 
     status_t err = checkIfDeviceIsUsable(cameraId);
@@ -1462,19 +1526,18 @@
             case -ENODEV:
             case -EBUSY:
                 return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
-                        "No camera device with ID \"%s\" currently available", cameraId.string());
+                        "No camera device with ID \"%s\" currently available", cameraId.c_str());
             default:
                 return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                        "Unknown error connecting to ID \"%s\"", cameraId.string());
+                        "Unknown error connecting to ID \"%s\"", cameraId.c_str());
         }
     }
     return Status::ok();
 }
 
-Status CameraService::validateClientPermissionsLocked(const String8& cameraId,
-        const String8& clientName8, int& clientUid, int& clientPid,
+Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
+        const std::string& clientName, int& clientUid, int& clientPid,
         /*out*/int& originalClientPid) const {
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
 
     int callingPid = CameraThreadState::getCallingPid();
@@ -1489,8 +1552,8 @@
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
-                callingPid, callingUid, cameraId.string(),
-                clientName8.string(), clientUid, clientPid);
+                callingPid, callingUid, cameraId.c_str(),
+                clientName.c_str(), clientUid, clientPid);
     }
 
     // Check if we can trust clientPid
@@ -1502,21 +1565,21 @@
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Untrusted caller (calling PID %d, UID %d) trying to "
                 "forward camera access to camera %s for client %s (PID %d, UID %d)",
-                callingPid, callingUid, cameraId.string(),
-                clientName8.string(), clientUid, clientPid);
+                callingPid, callingUid, cameraId.c_str(),
+                clientName.c_str(), clientUid, clientPid);
     }
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
         ALOGW("Attempting to connect to system-only camera id %s, connection rejected",
                 cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_DISCONNECTED, "No camera device with ID \"%s\" is"
-                                "available", cameraId.string());
+                                "available", cameraId.c_str());
     }
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.string());
+        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "No camera device with ID \"%s\""
-                "found while trying to query device kind", cameraId.string());
+                "found while trying to query device kind", cameraId.c_str());
 
     }
 
@@ -1525,36 +1588,40 @@
     // android.permission.SYSTEM_CAMERA for system only camera devices).
     attributionSource.pid = clientPid;
     attributionSource.uid = clientUid;
-    attributionSource.packageName = clientName8;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-            sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-            != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    attributionSource.packageName = clientName;
+    bool checkPermissionForCamera = checkPermission(cameraId, sCameraPermission, attributionSource,
+            std::string(), AppOpsManager::OP_NONE);
     if (callingPid != getpid() &&
                 (deviceKind != SystemCameraKind::SYSTEM_ONLY_CAMERA) && !checkPermissionForCamera) {
         ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" without camera permission",
-                clientName8.string(), clientUid, clientPid, cameraId.string());
+                clientName.c_str(), clientUid, clientPid, cameraId.c_str());
     }
 
     // Make sure the UID is in an active state to use the camera
-    if (!mUidPolicy->isUidActive(callingUid, String16(clientName8))) {
+    if (!mUidPolicy->isUidActive(callingUid, clientName)) {
         int32_t procState = mUidPolicy->getProcState(callingUid);
         ALOGE("Access Denial: can't use the camera from an idle UID pid=%d, uid=%d",
             clientPid, clientUid);
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" from background ("
                 "calling UID %d proc state %" PRId32 ")",
-                clientName8.string(), clientUid, clientPid, cameraId.string(),
+                clientName.c_str(), clientUid, clientPid, cameraId.c_str(),
                 callingUid, procState);
     }
 
-    // If sensor privacy is enabled then prevent access to the camera
-    if (mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
+
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera
+    // for use cases such as rear view and surround view cannot be disabled and is exempt
+    // from the sensor privacy policy. In all other cases, if sensor privacy is enabled,
+    // prevent access to the camera.
+    if ((!isAutomotivePrivilegedClient(callingUid) ||
+            !isAutomotiveExteriorSystemCamera(cameraId)) &&
+            mSensorPrivacyPolicy->isSensorPrivacyEnabled()) {
         ALOGE("Access Denial: cannot use the camera when sensor privacy is enabled");
         return STATUS_ERROR_FMT(ERROR_DISABLED,
                 "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" when sensor privacy "
-                "is enabled", clientName8.string(), clientUid, clientPid, cameraId.string());
+                "is enabled", clientName.c_str(), clientUid, clientPid, cameraId.c_str());
     }
 
     // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
@@ -1570,32 +1637,32 @@
             (mAllowedUsers.find(clientUserId) == mAllowedUsers.end())) {
         ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from "
                 "device user %d, currently allowed device users: %s)", callingPid, clientUserId,
-                toString(mAllowedUsers).string());
+                toString(mAllowedUsers).c_str());
         return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                 "Callers from device user %d are not currently allowed to connect to camera \"%s\"",
-                clientUserId, cameraId.string());
+                clientUserId, cameraId.c_str());
     }
 
     return Status::ok();
 }
 
-status_t CameraService::checkIfDeviceIsUsable(const String8& cameraId) const {
+status_t CameraService::checkIfDeviceIsUsable(const std::string& cameraId) const {
     auto cameraState = getCameraState(cameraId);
     int callingPid = CameraThreadState::getCallingPid();
     if (cameraState == nullptr) {
         ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid,
-                cameraId.string());
+                cameraId.c_str());
         return -ENODEV;
     }
 
     StatusInternal currentStatus = cameraState->getStatus();
     if (currentStatus == StatusInternal::NOT_PRESENT) {
         ALOGE("CameraService::connect X (PID %d) rejected (camera %s is not connected)",
-                callingPid, cameraId.string());
+                callingPid, cameraId.c_str());
         return -ENODEV;
     } else if (currentStatus == StatusInternal::ENUMERATING) {
         ALOGE("CameraService::connect X (PID %d) rejected, (camera %s is initializing)",
-                callingPid, cameraId.string());
+                callingPid, cameraId.c_str());
         return -EBUSY;
     }
 
@@ -1612,13 +1679,13 @@
     auto evicted = mActiveClientManager.addAndEvict(clientDescriptor);
 
     logConnected(desc->getKey(), static_cast<int>(desc->getOwnerId()),
-            String8(client->getPackageName()));
+            client->getPackageName());
 
     if (evicted.size() > 0) {
         // This should never happen - clients should already have been removed in disconnect
         for (auto& i : evicted) {
             ALOGE("%s: Invalid state: Client for camera %s was not removed in disconnect",
-                    __FUNCTION__, i->getKey().string());
+                    __FUNCTION__, i->getKey().c_str());
         }
 
         LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, clients not evicted properly",
@@ -1636,12 +1703,12 @@
     }
 }
 
-status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid,
-        apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
-        int oomScoreOffset, bool systemNativeClient,
+status_t CameraService::handleEvictionsLocked(const std::string& cameraId, int clientPid,
+        apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
+        const std::string& packageName, int oomScoreOffset, bool systemNativeClient,
         /*out*/
         sp<BasicClient>* client,
-        std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) {
+        std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial) {
     ATRACE_CALL();
     status_t ret = NO_ERROR;
     std::vector<DescriptorPtr> evictedClients;
@@ -1668,52 +1735,66 @@
             }
         }
 
-        // Get current active client PIDs
-        std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
-        ownerPids.push_back(clientPid);
-
-        std::vector<int> priorityScores(ownerPids.size());
-        std::vector<int> states(ownerPids.size());
-
-        // Get priority scores of all active PIDs
-        status_t err = ProcessInfoService::getProcessStatesScoresFromPids(
-                ownerPids.size(), &ownerPids[0], /*out*/&states[0],
-                /*out*/&priorityScores[0]);
-        if (err != OK) {
-            ALOGE("%s: Priority score query failed: %d",
-                  __FUNCTION__, err);
-            return err;
-        }
-
-        // Update all active clients' priorities
-        std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
-        for (size_t i = 0; i < ownerPids.size() - 1; i++) {
-            pidToPriorityMap.emplace(ownerPids[i],
-                    resource_policy::ClientPriority(priorityScores[i], states[i],
-                            /* isVendorClient won't get copied over*/ false,
-                            /* oomScoreOffset won't get copied over*/ 0));
-        }
-        mActiveClientManager.updatePriorities(pidToPriorityMap);
-
         // Get state for the given cameraId
         auto state = getCameraState(cameraId);
         if (state == nullptr) {
             ALOGE("CameraService::connect X (PID %d) rejected (no camera device with ID %s)",
-                clientPid, cameraId.string());
+                clientPid, cameraId.c_str());
             // Should never get here because validateConnectLocked should have errored out
             return BAD_VALUE;
         }
 
-        int32_t actualScore = priorityScores[priorityScores.size() - 1];
-        int32_t actualState = states[states.size() - 1];
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->checkService(String16(kProcessInfoServiceName));
+        if (!binder && isAutomotivePrivilegedClient(CameraThreadState::getCallingUid())) {
+            // If ProcessInfoService is not available and the client is an automotive
+            // privileged client used for safety-critical use cases such as rear view and
+            // surround view, which need to be available before Android boot completes, then
+            // use hardcoded values for the process state and priority score. Since this
+            // scenario occurs before the Android system services are up and the client is a
+            // native client, use NATIVE_ADJ as the priority score and PROCESS_STATE_BOUND_TOP
+            // as the state, because such automotive apps need to be visible on top.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), resource_policy::NATIVE_ADJ, clientPid,
+                    ActivityManager::PROCESS_STATE_BOUND_TOP, oomScoreOffset, systemNativeClient);
+        } else {
+            // Get current active client PIDs
+            std::vector<int> ownerPids(mActiveClientManager.getAllOwners());
+            ownerPids.push_back(clientPid);
 
-        // Make descriptor for incoming client. We store the oomScoreOffset
-        // since we might need it later on new handleEvictionsLocked and
-        // ProcessInfoService would not take that into account.
-        clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
-                sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
-                state->getConflicting(), actualScore, clientPid, actualState,
-                oomScoreOffset, systemNativeClient);
+            std::vector<int> priorityScores(ownerPids.size());
+            std::vector<int> states(ownerPids.size());
+
+            // Get priority scores of all active PIDs
+            status_t err = ProcessInfoService::getProcessStatesScoresFromPids(ownerPids.size(),
+                    &ownerPids[0], /*out*/&states[0], /*out*/&priorityScores[0]);
+            if (err != OK) {
+                ALOGE("%s: Priority score query failed: %d", __FUNCTION__, err);
+                return err;
+            }
+
+            // Update all active clients' priorities
+            std::map<int,resource_policy::ClientPriority> pidToPriorityMap;
+            for (size_t i = 0; i < ownerPids.size() - 1; i++) {
+                pidToPriorityMap.emplace(ownerPids[i],
+                        resource_policy::ClientPriority(priorityScores[i], states[i],
+                        /* isVendorClient won't get copied over*/ false,
+                        /* oomScoreOffset won't get copied over*/ 0));
+            }
+            mActiveClientManager.updatePriorities(pidToPriorityMap);
+
+            int32_t actualScore = priorityScores[priorityScores.size() - 1];
+            int32_t actualState = states[states.size() - 1];
+
+            // Make descriptor for incoming client. We store the oomScoreOffset
+            // since we might need it later during a new handleEvictionsLocked call, and
+            // ProcessInfoService would not take that into account.
+            clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId,
+                    sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()),
+                    state->getConflicting(), actualScore, clientPid, actualState,
+                    oomScoreOffset, systemNativeClient);
+        }
 
         resource_policy::ClientPriority clientPriority = clientDescriptor->getPriority();
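// Illustrative sketch, not part of this change: how the parallel score/state vectors
// fetched from ProcessInfoService above can be folded into a per-PID priority used for
// eviction decisions. The struct and helper names are hypothetical; the real ordering
// lives in resource_policy::ClientPriority. This assumes lower oom scores and lower
// process-state values mean a more important client.
#include <cstdint>
#include <cstddef>
#include <map>
#include <vector>

struct OwnerPriority {
    int32_t score;  // oom score adjustment; lower means more important
    int32_t state;  // process state; lower means more visible to the user
};

// True when 'a' should win a camera conflict against 'b' (assumption, see above).
static bool outranks(const OwnerPriority& a, const OwnerPriority& b) {
    if (a.state != b.state) return a.state < b.state;
    return a.score < b.score;
}

// Mirrors pidToPriorityMap in the hunk above: pair each owner PID with its priority.
static std::map<int, OwnerPriority> buildPriorityMap(const std::vector<int>& pids,
        const std::vector<int>& scores, const std::vector<int>& states) {
    std::map<int, OwnerPriority> map;
    for (std::size_t i = 0; i < pids.size() && i < scores.size() && i < states.size(); ++i) {
        map[pids[i]] = OwnerPriority{scores[i], states[i]};
    }
    return map;
}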
 
@@ -1727,25 +1808,25 @@
                     " priority).", clientPid);
 
             sp<BasicClient> clientSp = clientDescriptor->getValue();
-            String8 curTime = getFormattedCurrentTime();
+            std::string curTime = getFormattedCurrentTime();
             auto incompatibleClients =
                     mActiveClientManager.getIncompatibleClients(clientDescriptor);
 
-            String8 msg = String8::format("%s : DENIED connect device %s client for package %s "
-                    "(PID %d, score %d state %d) due to eviction policy", curTime.string(),
-                    cameraId.string(), packageName.string(), clientPid,
+            std::string msg = fmt::sprintf("%s : DENIED connect device %s client for package %s "
+                    "(PID %d, score %d state %d) due to eviction policy", curTime.c_str(),
+                    cameraId.c_str(), packageName.c_str(), clientPid,
                     clientPriority.getScore(), clientPriority.getState());
 
             for (auto& i : incompatibleClients) {
-                msg.appendFormat("\n   - Blocked by existing device %s client for package %s"
+                msg += fmt::sprintf("\n   - Blocked by existing device %s client for package %s"
                         "(PID %" PRId32 ", score %" PRId32 ", state %" PRId32 ")",
-                        i->getKey().string(),
-                        String8{i->getValue()->getPackageName()}.string(),
+                        i->getKey().c_str(),
+                        i->getValue()->getPackageName().c_str(),
                         i->getOwnerId(), i->getPriority().getScore(),
                         i->getPriority().getState());
                 ALOGE("   Conflicts with: Device %s, client package %s (PID %"
-                        PRId32 ", score %" PRId32 ", state %" PRId32 ")", i->getKey().string(),
-                        String8{i->getValue()->getPackageName()}.string(), i->getOwnerId(),
+                        PRId32 ", score %" PRId32 ", state %" PRId32 ")", i->getKey().c_str(),
+                        i->getValue()->getPackageName().c_str(), i->getOwnerId(),
                         i->getPriority().getScore(), i->getPriority().getState());
             }
 
@@ -1774,17 +1855,17 @@
             }
 
             ALOGE("CameraService::connect evicting conflicting client for camera ID %s",
-                    i->getKey().string());
+                    i->getKey().c_str());
             evictedClients.push_back(i);
 
             // Log the clients evicted
-            logEvent(String8::format("EVICT device %s client held by package %s (PID"
+            logEvent(fmt::sprintf("EVICT device %s client held by package %s (PID"
                     " %" PRId32 ", score %" PRId32 ", state %" PRId32 ")\n - Evicted by device %s client for"
                     " package %s (PID %d, score %" PRId32 ", state %" PRId32 ")",
-                    i->getKey().string(), String8{clientSp->getPackageName()}.string(),
+                    i->getKey().c_str(), clientSp->getPackageName().c_str(),
                     i->getOwnerId(), i->getPriority().getScore(),
-                    i->getPriority().getState(), cameraId.string(),
-                    packageName.string(), clientPid, clientPriority.getScore(),
+                    i->getPriority().getState(), cameraId.c_str(),
+                    packageName.c_str(), clientPid, clientPriority.getScore(),
                     clientPriority.getState()));
 
             // Notify the client of disconnection
@@ -1810,18 +1891,18 @@
 
     for (const auto& i : evictedClients) {
         ALOGV("%s: Waiting for disconnect to complete for client for device %s (PID %" PRId32 ")",
-                __FUNCTION__, i->getKey().string(), i->getOwnerId());
+                __FUNCTION__, i->getKey().c_str(), i->getOwnerId());
         ret = mActiveClientManager.waitUntilRemoved(i, DEFAULT_DISCONNECT_TIMEOUT_NS);
         if (ret == TIMED_OUT) {
             ALOGE("%s: Timed out waiting for client for device %s to disconnect, "
-                    "current clients:\n%s", __FUNCTION__, i->getKey().string(),
-                    mActiveClientManager.toString().string());
+                    "current clients:\n%s", __FUNCTION__, i->getKey().c_str(),
+                    mActiveClientManager.toString().c_str());
             return -EBUSY;
         }
         if (ret != NO_ERROR) {
             ALOGE("%s: Received error waiting for client for device %s to disconnect: %s (%d), "
-                    "current clients:\n%s", __FUNCTION__, i->getKey().string(), strerror(-ret),
-                    ret, mActiveClientManager.toString().string());
+                    "current clients:\n%s", __FUNCTION__, i->getKey().c_str(), strerror(-ret),
+                    ret, mActiveClientManager.toString().c_str());
             return ret;
         }
     }
@@ -1843,7 +1924,7 @@
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int api1CameraId,
-        const String16& clientPackageName,
+        const std::string& clientPackageName,
         int clientUid,
         int clientPid,
         int targetSdkVersion,
@@ -1855,16 +1936,16 @@
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    String8 id = cameraIdIntToStr(api1CameraId);
+    std::string cameraIdStr = cameraIdIntToStr(api1CameraId);
     sp<Client> client = nullptr;
-    ret = connectHelper<ICameraClient,Client>(cameraClient, id, api1CameraId,
-            clientPackageName,/*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+    ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
+            clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, id, /*out*/client);
+            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
     if(!ret.isOk()) {
-        logRejected(id, CameraThreadState::getCallingPid(), String8(clientPackageName),
-                ret.toString8());
+        logRejected(cameraIdStr, CameraThreadState::getCallingPid(), clientPackageName,
+                toStdString(ret.toString8()));
         return ret;
     }
 
@@ -1889,13 +1970,13 @@
     //      have android.permission.SYSTEM_CAMERA permissions.
     if (!isVendorListener && (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA ||
             (systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(clientPid, clientUid)))) {
+            !hasPermissionsForSystemCamera(std::string(), clientPid, clientUid)))) {
         return true;
     }
     return false;
 }
 
-bool CameraService::shouldRejectSystemCameraConnection(const String8& cameraId) const {
+bool CameraService::shouldRejectSystemCameraConnection(const std::string& cameraId) const {
     // Rules for rejection:
     // 1) If cameraserver tries to access this camera device, accept the
     //    connection.
@@ -1929,7 +2010,7 @@
     //     characteristics) even if clients don't have android.permission.CAMERA. We do not want the
     //     same behavior for system camera devices.
     if (!systemClient && systemCameraKind == SystemCameraKind::SYSTEM_ONLY_CAMERA &&
-            !hasPermissionsForSystemCamera(cPid, cUid)) {
+            !hasPermissionsForSystemCamera(cameraId, cPid, cUid)) {
         ALOGW("Rejecting access to system only camera %s, inadequete permissions",
                 cameraId.c_str());
         return true;
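// Illustrative sketch, not part of this change: the essence of the system-camera gate in
// the two checks above. HIDDEN_SECURE_CAMERA devices are never exposed through this path,
// and SYSTEM_ONLY_CAMERA devices require a trusted/system caller or SYSTEM_CAMERA-level
// permission. The enum and helper below are hypothetical simplifications.
enum class CameraKind { kPublic, kSystemOnly, kHiddenSecure };

static bool rejectSystemCameraClient(CameraKind kind, bool callerIsTrustedOrSystem,
        bool callerHasSystemCameraPermission) {
    if (kind == CameraKind::kHiddenSecure) {
        return true;  // hidden secure cameras are never handed to clients here
    }
    if (kind == CameraKind::kSystemOnly) {
        return !callerIsTrustedOrSystem && !callerHasSystemCameraPermission;
    }
    return false;  // public cameras are not affected by this gate
}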
@@ -1940,9 +2021,9 @@
 
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-        const String16& unresolvedCameraId,
-        const String16& clientPackageName,
-        const std::optional<String16>& clientFeatureId,
+        const std::string& unresolvedCameraId,
+        const std::string& clientPackageName,
+        const std::optional<std::string>& clientFeatureId,
         int clientUid, int oomScoreOffset, int targetSdkVersion,
         bool overrideToPortrait,
         /*out*/
@@ -1950,25 +2031,25 @@
 
     ATRACE_CALL();
     Status ret = Status::ok();
-    String8 id = resolveCameraId(String8(unresolvedCameraId), clientPackageName);
+    const std::string cameraId = resolveCameraId(unresolvedCameraId, clientPackageName);
     sp<CameraDeviceClient> client = nullptr;
-    String16 clientPackageNameAdj = clientPackageName;
+    std::string clientPackageNameAdj = clientPackageName;
     int callingPid = CameraThreadState::getCallingPid();
     bool systemNativeClient = false;
     if (doesClientHaveSystemUid() && (clientPackageNameAdj.size() == 0)) {
         std::string systemClient =
-                StringPrintf("client.pid<%d>", CameraThreadState::getCallingPid());
-        clientPackageNameAdj = String16(systemClient.c_str());
+                fmt::sprintf("client.pid<%d>", CameraThreadState::getCallingPid());
+        clientPackageNameAdj = systemClient;
         systemNativeClient = true;
     }
 
     if (oomScoreOffset < 0) {
-        String8 msg =
-                String8::format("Cannot increase the priority of a client %s pid %d for "
-                        "camera id %s", String8(clientPackageNameAdj).string(), callingPid,
-                        id.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg =
+                fmt::sprintf("Cannot increase the priority of a client %s pid %d for "
+                        "camera id %s", clientPackageNameAdj.c_str(), callingPid,
+                        cameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     userid_t clientUserId = multiuser_get_user_id(clientUid);
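// Illustrative sketch, not part of this change: building the placeholder package name
// used above for system native clients that connect without one, so that logs and
// eviction records stay attributable to a PID. The helper name is hypothetical.
#include <string>

static std::string makeSystemNativeClientName(int pid) {
    return "client.pid<" + std::to_string(pid) + ">";
}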
@@ -1977,33 +2058,35 @@
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
-    if (mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
-        String8 msg =
-                String8::format("Camera disabled by device policy");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(ERROR_DISABLED, msg.string());
+    // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera for
+    // use cases such as rear view and surround view cannot be disabled.
+    if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(cameraId))
+            && mCameraServiceProxyWrapper->isCameraDisabled(clientUserId)) {
+        std::string msg = "Camera disabled by device policy";
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(ERROR_DISABLED, msg.c_str());
     }
 
     // enforce system camera permissions
-    if (oomScoreOffset > 0 &&
-            !hasPermissionsForSystemCamera(callingPid, CameraThreadState::getCallingUid()) &&
-            !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
-        String8 msg =
-                String8::format("Cannot change the priority of a client %s pid %d for "
+    if (oomScoreOffset > 0
+            && !hasPermissionsForSystemCamera(cameraId, callingPid,
+                    CameraThreadState::getCallingUid())
+            && !isTrustedCallingUid(CameraThreadState::getCallingUid())) {
+        std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
-                        String8(clientPackageNameAdj).string(), callingPid, id.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.string());
+                        clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.c_str());
     }
 
-    ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
-            /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,clientFeatureId,
+    ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
+            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
             clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false,
-            String8(unresolvedCameraId), /*out*/client);
+            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
+            /*out*/client);
 
     if(!ret.isOk()) {
-        logRejected(id, callingPid, String8(clientPackageNameAdj), ret.toString8());
+        logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2034,11 +2117,11 @@
     return ret;
 }
 
-String16 CameraService::getPackageNameFromUid(int clientUid) {
-    String16 packageName("");
+std::string CameraService::getPackageNameFromUid(int clientUid) {
+    std::string packageName("");
 
     sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->getService(String16(kPermissionServiceName));
+    sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
     if (binder == 0) {
         ALOGE("Cannot get permission service");
         // Return empty package name and the further interaction
@@ -2059,22 +2142,24 @@
     }
 
     // Arbitrarily pick the first name in the list
-    packageName = packages[0];
+    packageName = toStdString(packages[0]);
 
     return packageName;
 }
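// Illustrative sketch, not part of this change: the selection policy used above once the
// permission service has returned the packages for a UID. Shared UIDs can map to several
// packages with identical camera permissions, so any one of them is acceptable for
// logging, and an empty name is returned when the lookup fails. The helper name is
// hypothetical.
#include <string>
#include <vector>

static std::string pickPackageNameForUid(const std::vector<std::string>& packagesForUid) {
    if (packagesForUid.empty()) {
        return std::string();  // caller falls back to an empty package name
    }
    return packagesForUid.front();  // arbitrary but stable choice, as in the code above
}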
 
 template<class CALLBACK, class CLIENT>
-Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-        int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
-        const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
+Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
+        int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
+        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
         apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode, const String8& originalCameraId,
+        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<CLIENT>& device) {
     binder::Status ret = binder::Status::ok();
 
     bool isNonSystemNdk = false;
-    String16 clientPackageName;
+    std::string clientPackageName;
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            CameraThreadState::getCallingUid() : clientUid;
     if (clientPackageNameMaybe.size() <= 0) {
         // NDK calls don't come with package names, but we need one for various cases.
         // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
@@ -2082,21 +2167,17 @@
         // same permissions, so picking any associated package name is sufficient. For some
         // other cases, this may give inaccurate names for clients in logs.
         isNonSystemNdk = true;
-        int packageUid = (clientUid == USE_CALLING_UID) ?
-            CameraThreadState::getCallingUid() : clientUid;
         clientPackageName = getPackageNameFromUid(packageUid);
     } else {
         clientPackageName = clientPackageNameMaybe;
     }
 
-    String8 clientName8(clientPackageName);
-
     int originalClientPid = 0;
 
     int packagePid = (clientPid == USE_CALLING_PID) ?
         CameraThreadState::getCallingPid() : clientPid;
     ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
-            "Camera API version %d", packagePid, clientName8.string(), cameraId.string(),
+            "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
             static_cast<int>(effectiveApiLevel));
 
     nsecs_t openTimeNs = systemTime();
@@ -2115,11 +2196,11 @@
                     , clientPid);
             return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
                     "Cannot open camera %s for \"%s\" (PID %d): Too many other clients connecting",
-                    cameraId.string(), clientName8.string(), clientPid);
+                    cameraId.c_str(), clientPackageName.c_str(), clientPid);
         }
 
         // Enforce client permissions and do basic validity checks
-        if(!(ret = validateConnectLocked(cameraId, clientName8,
+        if(!(ret = validateConnectLocked(cameraId, clientPackageName,
                 /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
             return ret;
         }
@@ -2136,27 +2217,27 @@
         status_t err;
 
         sp<BasicClient> clientTmp = nullptr;
-        std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial;
+        std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
         if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
-                IInterface::asBinder(cameraCb), clientName8, oomScoreOffset, systemNativeClient,
-                /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
+                IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
+                systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
             switch (err) {
                 case -ENODEV:
                     return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
                             "No camera device with ID \"%s\" currently available",
-                            cameraId.string());
+                            cameraId.c_str());
                 case -EBUSY:
                     return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
                             "Higher-priority client using camera, ID \"%s\" currently unavailable",
-                            cameraId.string());
+                            cameraId.c_str());
                 case -EUSERS:
                     return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
                             "Too many cameras already open, cannot open camera \"%s\"",
-                            cameraId.string());
+                            cameraId.c_str());
                 default:
                     return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                             "Unexpected error %s (%d) opening camera \"%s\"",
-                            strerror(-err), err, cameraId.string());
+                            strerror(-err), err, cameraId.c_str());
             }
         }
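// Illustrative sketch, not part of this change: the errno-to-service-error mapping used
// above when eviction handling fails, written as a standalone helper. The enum and
// helper names are hypothetical; the real code returns binder::Status values.
#include <cerrno>

enum class ConnectError { kDisconnected, kCameraInUse, kMaxCamerasInUse, kInvalidOperation };

static ConnectError mapEvictionError(int err) {
    switch (err) {
        case -ENODEV: return ConnectError::kDisconnected;      // no such camera device
        case -EBUSY:  return ConnectError::kCameraInUse;       // higher-priority client holds it
        case -EUSERS: return ConnectError::kMaxCamerasInUse;   // too many cameras open
        default:      return ConnectError::kInvalidOperation;  // unexpected failure
    }
}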
 
@@ -2174,17 +2255,17 @@
                 getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
                         /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
-            ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.string());
+            ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.c_str());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                    "Unable to get camera device \"%s\" facing", cameraId.string());
+                    "Unable to get camera device \"%s\" facing", cameraId.c_str());
         }
 
         sp<BasicClient> tmp = nullptr;
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
-                mPerfClassPrimaryCameraIds, cameraId.string(), targetSdkVersion);
+                mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
-                clientFeatureId, cameraId, api1CameraId, facing, orientation,
-                clientPid, clientUid, getpid(),
+                clientFeatureId, cameraId, api1CameraId, facing,
+                orientation, clientPid, clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
                 overrideToPortrait, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
@@ -2195,7 +2276,7 @@
         LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
                 __FUNCTION__);
 
-        String8 monitorTags = isClientWatched(client.get()) ? mMonitorTags : String8("");
+        std::string monitorTags = isClientWatched(client.get()) ? mMonitorTags : std::string();
         err = client->initialize(mCameraProviderManager, monitorTags);
         if (err != OK) {
             ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
@@ -2206,24 +2287,24 @@
             switch(err) {
                 case BAD_VALUE:
                     return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                            "Illegal argument to HAL module for camera \"%s\"", cameraId.string());
+                            "Illegal argument to HAL module for camera \"%s\"", cameraId.c_str());
                 case -EBUSY:
                     return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
-                            "Camera \"%s\" is already open", cameraId.string());
+                            "Camera \"%s\" is already open", cameraId.c_str());
                 case -EUSERS:
                     return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
                             "Too many cameras already open, cannot open camera \"%s\"",
-                            cameraId.string());
+                            cameraId.c_str());
                 case PERMISSION_DENIED:
                     return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                            "No permission to open camera \"%s\"", cameraId.string());
+                            "No permission to open camera \"%s\"", cameraId.c_str());
                 case -EACCES:
                     return STATUS_ERROR_FMT(ERROR_DISABLED,
-                            "Camera \"%s\" disabled by policy", cameraId.string());
+                            "Camera \"%s\" disabled by policy", cameraId.c_str());
                 case -ENODEV:
                 default:
                     return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                            "Failed to initialize camera \"%s\": %s (%d)", cameraId.string(),
+                            "Failed to initialize camera \"%s\": %s (%d)", cameraId.c_str(),
                             strerror(-err), err);
             }
         }
@@ -2240,7 +2321,7 @@
                 cameraState->setShimParams(params);
             } else {
                 ALOGE("%s: Cannot update shim parameters for camera %s, no such device exists.",
-                        __FUNCTION__, cameraId.string());
+                        __FUNCTION__, cameraId.c_str());
             }
         }
 
@@ -2282,32 +2363,38 @@
                     clientPackageName));
         }
 
-        // Set camera muting behavior
-        bool isCameraPrivacyEnabled =
-                mSensorPrivacyPolicy->isCameraPrivacyEnabled();
-        if (client->supportsCameraMute()) {
-            client->setCameraMute(
-                    mOverrideCameraMuteMode || isCameraPrivacyEnabled);
-        } else if (isCameraPrivacyEnabled) {
-            // no camera mute supported, but privacy is on! => disconnect
-            ALOGI("Camera mute not supported for package: %s, camera id: %s",
-                    String8(client->getPackageName()).string(), cameraId.string());
-            // Do not hold mServiceLock while disconnecting clients, but
-            // retain the condition blocking other clients from connecting
-            // in mServiceLockWrapper if held.
-            mServiceLock.unlock();
-            // Clear caller identity temporarily so client disconnect PID
-            // checks work correctly
-            int64_t token = CameraThreadState::clearCallingIdentity();
-            // Note AppOp to trigger the "Unblock" dialog
-            client->noteAppOp();
-            client->disconnect();
-            CameraThreadState::restoreCallingIdentity(token);
-            // Reacquire mServiceLock
-            mServiceLock.lock();
+        // An automotive privileged client (AID_AUTOMOTIVE_EVS) using an exterior system camera
+        // for use cases such as rear view and surround view cannot be disabled and is exempt
+        // from the camera privacy policy.
+        if ((!isAutomotivePrivilegedClient(packageUid) ||
+                !isAutomotiveExteriorSystemCamera(cameraId))) {
+            // Set camera muting behavior.
+            bool isCameraPrivacyEnabled =
+                    mSensorPrivacyPolicy->isCameraPrivacyEnabled();
+            if (client->supportsCameraMute()) {
+                client->setCameraMute(
+                        mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+            } else if (isCameraPrivacyEnabled) {
+                // no camera mute supported, but privacy is on! => disconnect
+                ALOGI("Camera mute not supported for package: %s, camera id: %s",
+                        client->getPackageName().c_str(), cameraId.c_str());
+                // Do not hold mServiceLock while disconnecting clients, but
+                // retain the condition blocking other clients from connecting
+                // in mServiceLockWrapper if held.
+                mServiceLock.unlock();
+                // Clear caller identity temporarily so client disconnect PID
+                // checks work correctly
+                int64_t token = CameraThreadState::clearCallingIdentity();
+                // Note AppOp to trigger the "Unblock" dialog
+                client->noteAppOp();
+                client->disconnect();
+                CameraThreadState::restoreCallingIdentity(token);
+                // Reacquire mServiceLock
+                mServiceLock.lock();
 
-            return STATUS_ERROR_FMT(ERROR_DISABLED,
-                    "Camera \"%s\" disabled due to camera mute", cameraId.string());
+                return STATUS_ERROR_FMT(ERROR_DISABLED,
+                        "Camera \"%s\" disabled due to camera mute", cameraId.c_str());
+            }
         }
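// Illustrative sketch, not part of this change: the mute/privacy decision above reduced
// to a predicate. Automotive privileged clients on exterior system cameras bypass the
// policy entirely; a client that supports muting is muted while privacy (or the override)
// is on; a client that cannot mute while privacy is on is disconnected. The enum and
// helper names are hypothetical simplifications.
enum class MuteDecision { kNoAction, kMuteCamera, kDisconnectClient };

static MuteDecision decideCameraMute(bool automotiveExteriorPrivilegedClient,
        bool privacyEnabled, bool overrideMuteMode, bool clientSupportsMute) {
    if (automotiveExteriorPrivilegedClient) {
        return MuteDecision::kNoAction;  // exempt from the camera privacy policy
    }
    if (clientSupportsMute) {
        return (privacyEnabled || overrideMuteMode) ? MuteDecision::kMuteCamera
                                                    : MuteDecision::kNoAction;
    }
    return privacyEnabled ? MuteDecision::kDisconnectClient : MuteDecision::kNoAction;
}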
 
         if (shimUpdateOnly) {
@@ -2345,7 +2432,7 @@
                 if(res != OK) {
                     return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
                             "No camera device with ID \"%s\" currently available",
-                            mInjectionExternalCamId.string());
+                            mInjectionExternalCamId.c_str());
                 }
                 res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
                 if (res != OK) {
@@ -2353,7 +2440,7 @@
                 }
             } else {
                 ALOGE("%s: Internal camera ID = %s 's client does not exist!",
-                        __FUNCTION__, mInjectionInternalCamId.string());
+                        __FUNCTION__, mInjectionInternalCamId.c_str());
                 res = NO_INIT;
                 mInjectionStatusListener->notifyInjectionError(mInjectionExternalCamId, res);
             }
@@ -2363,7 +2450,8 @@
     return ret;
 }
 
-status_t CameraService::addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient) {
+status_t CameraService::addOfflineClient(const std::string &cameraId,
+        sp<BasicClient> offlineClient) {
     if (offlineClient.get() == nullptr) {
         return BAD_VALUE;
     }
@@ -2392,7 +2480,7 @@
         const auto& onlinePriority = onlineClientDesc->getPriority();
         auto offlineClientDesc = CameraClientManager::makeClientDescriptor(
                 kOfflineDevice + onlineClientDesc->getKey(), offlineClient, /*cost*/ 0,
-                /*conflictingKeys*/ std::set<String8>(), onlinePriority.getScore(),
+                /*conflictingKeys*/ std::set<std::string>(), onlinePriority.getScore(),
                 onlineClientDesc->getOwnerId(), onlinePriority.getState(),
                 // native clients don't have offline processing support.
                /*oomScoreOffset*/ 0, /*systemNativeClient*/false);
@@ -2408,7 +2496,8 @@
             return BAD_VALUE;
         }
 
-        String8 monitorTags = isClientWatched(offlineClient.get()) ? mMonitorTags : String8("");
+        std::string monitorTags = isClientWatched(offlineClient.get())
+                ? mMonitorTags : std::string();
         auto err = offlineClient->initialize(mCameraProviderManager, monitorTags);
         if (err != OK) {
             ALOGE("%s: Could not initialize offline client.", __FUNCTION__);
@@ -2419,7 +2508,7 @@
         if (evicted.size() > 0) {
             for (auto& i : evicted) {
                 ALOGE("%s: Invalid state: Offline client for camera %s was not removed ",
-                        __FUNCTION__, i->getKey().string());
+                        __FUNCTION__, i->getKey().c_str());
             }
 
             LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, offline clients not evicted "
@@ -2430,7 +2519,7 @@
 
         logConnectedOffline(offlineClientDesc->getKey(),
                 static_cast<int>(offlineClientDesc->getOwnerId()),
-                String8(offlineClient->getPackageName()));
+                offlineClient->getPackageName());
 
         sp<IBinder> remoteCallback = offlineClient->getRemote();
         if (remoteCallback != nullptr) {
@@ -2441,9 +2530,8 @@
     return OK;
 }
 
-Status CameraService::turnOnTorchWithStrengthLevel(const String16& unresolvedCameraId,
-        int32_t torchStrength,
-        const sp<IBinder>& clientBinder) {
+Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
+        int32_t torchStrength, const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2453,108 +2541,108 @@
                 "Torch client binder in null.");
     }
 
-    String8 id = resolveCameraId(String8(unresolvedCameraId));
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
     int uid = CameraThreadState::getCallingUid();
 
-    if (shouldRejectSystemCameraConnection(id)) {
+    if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to change the strength level"
-                "for system only device %s: ", id.string());
+                "for system only device %s: ", cameraId.c_str());
     }
 
     // verify id is valid
-    auto state = getCameraState(id);
+    auto state = getCameraState(cameraId);
     if (state == nullptr) {
-        ALOGE("%s: camera id is invalid %s", __FUNCTION__, id.string());
+        ALOGE("%s: camera id is invalid %s", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-            "Camera ID \"%s\" is a not valid camera ID", id.string());
+            "Camera ID \"%s\" is a not valid camera ID", cameraId.c_str());
     }
 
     StatusInternal cameraStatus = state->getStatus();
     if (cameraStatus != StatusInternal::NOT_AVAILABLE &&
             cameraStatus != StatusInternal::PRESENT) {
-        ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, id.string(),
+        ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, cameraId.c_str(),
             (int)cameraStatus);
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                "Camera ID \"%s\" is a not valid camera ID", id.string());
+                "Camera ID \"%s\" is a not valid camera ID", cameraId.c_str());
     }
 
     {
         Mutex::Autolock al(mTorchStatusMutex);
         TorchModeStatus status;
-        status_t err = getTorchStatusLocked(id, &status);
+        status_t err = getTorchStatusLocked(cameraId, &status);
         if (err != OK) {
             if (err == NAME_NOT_FOUND) {
              return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "Camera \"%s\" does not have a flash unit", id.string());
+                    "Camera \"%s\" does not have a flash unit", cameraId.c_str());
             }
             ALOGE("%s: getting current torch status failed for camera %s",
-                    __FUNCTION__, id.string());
+                    __FUNCTION__, cameraId.c_str());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                     "Error changing torch strength level for camera \"%s\": %s (%d)",
-                    id.string(), strerror(-err), err);
+                    cameraId.c_str(), strerror(-err), err);
         }
 
         if (status == TorchModeStatus::NOT_AVAILABLE) {
             if (cameraStatus == StatusInternal::NOT_AVAILABLE) {
                 ALOGE("%s: torch mode of camera %s is not available because "
-                        "camera is in use.", __FUNCTION__, id.string());
+                        "camera is in use.", __FUNCTION__, cameraId.c_str());
                 return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
                         "Torch for camera \"%s\" is not available due to an existing camera user",
-                        id.string());
+                        cameraId.c_str());
             } else {
                 ALOGE("%s: torch mode of camera %s is not available due to "
-                       "insufficient resources", __FUNCTION__, id.string());
+                       "insufficient resources", __FUNCTION__, cameraId.c_str());
                 return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
                         "Torch for camera \"%s\" is not available due to insufficient resources",
-                        id.string());
+                        cameraId.c_str());
             }
         }
     }
 
     {
         Mutex::Autolock al(mTorchUidMapMutex);
-        updateTorchUidMapLocked(String16(id), uid);
+        updateTorchUidMapLocked(cameraId, uid);
     }
     // Check if the current torch strength level is same as the new one.
     bool shouldSkipTorchStrengthUpdates = mCameraProviderManager->shouldSkipTorchStrengthUpdate(
-            id.string(), torchStrength);
+            cameraId, torchStrength);
 
-    status_t err = mFlashlight->turnOnTorchWithStrengthLevel(id, torchStrength);
+    status_t err = mFlashlight->turnOnTorchWithStrengthLevel(cameraId, torchStrength);
 
     if (err != OK) {
         int32_t errorCode;
-        String8 msg;
+        std::string msg;
         switch (err) {
             case -ENOSYS:
-                msg = String8::format("Camera \"%s\" has no flashlight.",
-                    id.string());
+                msg = fmt::sprintf("Camera \"%s\" has no flashlight.",
+                    cameraId.c_str());
                 errorCode = ERROR_ILLEGAL_ARGUMENT;
                 break;
             case -EBUSY:
-                msg = String8::format("Camera \"%s\" is in use",
-                    id.string());
+                msg = fmt::sprintf("Camera \"%s\" is in use",
+                    cameraId.c_str());
                 errorCode = ERROR_CAMERA_IN_USE;
                 break;
             case -EINVAL:
-                msg = String8::format("Torch strength level %d is not within the "
+                msg = fmt::sprintf("Torch strength level %d is not within the "
                         "valid range.", torchStrength);
                 errorCode = ERROR_ILLEGAL_ARGUMENT;
                 break;
             default:
-                msg = String8::format("Changing torch strength level failed.");
+                msg = "Changing torch strength level failed.";
                 errorCode = ERROR_INVALID_OPERATION;
         }
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(errorCode, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(errorCode, msg.c_str());
     }
 
     {
         // update the link to client's death
         // Store the last client that turns on each camera's torch mode.
         Mutex::Autolock al(mTorchClientMapMutex);
-        ssize_t index = mTorchClientMap.indexOfKey(id);
+        ssize_t index = mTorchClientMap.indexOfKey(cameraId);
         if (index == NAME_NOT_FOUND) {
-            mTorchClientMap.add(id, clientBinder);
+            mTorchClientMap.add(cameraId, clientBinder);
         } else {
             mTorchClientMap.valueAt(index)->unlinkToDeath(this);
             mTorchClientMap.replaceValueAt(index, clientBinder);
@@ -2563,17 +2651,15 @@
     }
 
     int clientPid = CameraThreadState::getCallingPid();
-    const char *id_cstr = id.c_str();
     ALOGI("%s: Torch strength for camera id %s changed to %d for client PID %d",
-            __FUNCTION__, id_cstr, torchStrength, clientPid);
+            __FUNCTION__, cameraId.c_str(), torchStrength, clientPid);
     if (!shouldSkipTorchStrengthUpdates) {
-        broadcastTorchStrengthLevel(id, torchStrength);
+        broadcastTorchStrengthLevel(cameraId, torchStrength);
     }
     return Status::ok();
 }
 
-Status CameraService::setTorchMode(const String16& unresolvedCameraId,
-        bool enabled,
+Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
         const sp<IBinder>& clientBinder) {
     Mutex::Autolock lock(mServiceLock);
 
@@ -2584,58 +2670,59 @@
                 "Torch client Binder is null");
     }
 
-    String8 id = resolveCameraId(String8(unresolvedCameraId));
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
     int uid = CameraThreadState::getCallingUid();
 
-    if (shouldRejectSystemCameraConnection(id)) {
+    if (shouldRejectSystemCameraConnection(cameraId)) {
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
-                " for system only device %s: ", id.string());
+                " for system only device %s: ", cameraId.c_str());
     }
     // verify id is valid.
-    auto state = getCameraState(id);
+    auto state = getCameraState(cameraId);
     if (state == nullptr) {
-        ALOGE("%s: camera id is invalid %s", __FUNCTION__, id.string());
+        ALOGE("%s: camera id is invalid %s", __FUNCTION__, cameraId.c_str());
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                "Camera ID \"%s\" is a not valid camera ID", id.string());
+                "Camera ID \"%s\" is a not valid camera ID", cameraId.c_str());
     }
 
     StatusInternal cameraStatus = state->getStatus();
     if (cameraStatus != StatusInternal::PRESENT &&
             cameraStatus != StatusInternal::NOT_AVAILABLE) {
-        ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, id.string(), (int)cameraStatus);
+        ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, cameraId.c_str(),
+                (int)cameraStatus);
         return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                "Camera ID \"%s\" is a not valid camera ID", id.string());
+                "Camera ID \"%s\" is a not valid camera ID", cameraId.c_str());
     }
 
     {
         Mutex::Autolock al(mTorchStatusMutex);
         TorchModeStatus status;
-        status_t err = getTorchStatusLocked(id, &status);
+        status_t err = getTorchStatusLocked(cameraId, &status);
         if (err != OK) {
             if (err == NAME_NOT_FOUND) {
                 return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "Camera \"%s\" does not have a flash unit", id.string());
+                        "Camera \"%s\" does not have a flash unit", cameraId.c_str());
             }
             ALOGE("%s: getting current torch status failed for camera %s",
-                    __FUNCTION__, id.string());
+                    __FUNCTION__, cameraId.c_str());
             return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
-                    "Error updating torch status for camera \"%s\": %s (%d)", id.string(),
+                    "Error updating torch status for camera \"%s\": %s (%d)", cameraId.c_str(),
                     strerror(-err), err);
         }
 
         if (status == TorchModeStatus::NOT_AVAILABLE) {
             if (cameraStatus == StatusInternal::NOT_AVAILABLE) {
                 ALOGE("%s: torch mode of camera %s is not available because "
-                        "camera is in use", __FUNCTION__, id.string());
+                        "camera is in use", __FUNCTION__, cameraId.c_str());
                 return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
                         "Torch for camera \"%s\" is not available due to an existing camera user",
-                        id.string());
+                        cameraId.c_str());
             } else {
                 ALOGE("%s: torch mode of camera %s is not available due to "
-                        "insufficient resources", __FUNCTION__, id.string());
+                        "insufficient resources", __FUNCTION__, cameraId.c_str());
                 return STATUS_ERROR_FMT(ERROR_MAX_CAMERAS_IN_USE,
                         "Torch for camera \"%s\" is not available due to insufficient resources",
-                        id.string());
+                        cameraId.c_str());
             }
         }
     }
@@ -2644,43 +2731,43 @@
         // Update UID map - this is used in the torch status changed callbacks, so must be done
         // before setTorchMode
         Mutex::Autolock al(mTorchUidMapMutex);
-        updateTorchUidMapLocked(String16(id), uid);
+        updateTorchUidMapLocked(cameraId, uid);
     }
 
-    status_t err = mFlashlight->setTorchMode(id, enabled);
+    status_t err = mFlashlight->setTorchMode(cameraId, enabled);
 
     if (err != OK) {
         int32_t errorCode;
-        String8 msg;
+        std::string msg;
         switch (err) {
             case -ENOSYS:
-                msg = String8::format("Camera \"%s\" has no flashlight",
-                    id.string());
+                msg = fmt::sprintf("Camera \"%s\" has no flashlight",
+                    cameraId.c_str());
                 errorCode = ERROR_ILLEGAL_ARGUMENT;
                 break;
             case -EBUSY:
-                msg = String8::format("Camera \"%s\" is in use",
-                    id.string());
+                msg = fmt::sprintf("Camera \"%s\" is in use",
+                    cameraId.c_str());
                 errorCode = ERROR_CAMERA_IN_USE;
                 break;
             default:
-                msg = String8::format(
+                msg = fmt::sprintf(
                     "Setting torch mode of camera \"%s\" to %d failed: %s (%d)",
-                    id.string(), enabled, strerror(-err), err);
+                    cameraId.c_str(), enabled, strerror(-err), err);
                 errorCode = ERROR_INVALID_OPERATION;
         }
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        logServiceError(msg,errorCode);
-        return STATUS_ERROR(errorCode, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        logServiceError(msg, errorCode);
+        return STATUS_ERROR(errorCode, msg.c_str());
     }
 
     {
         // update the link to client's death
         Mutex::Autolock al(mTorchClientMapMutex);
-        ssize_t index = mTorchClientMap.indexOfKey(id);
+        ssize_t index = mTorchClientMap.indexOfKey(cameraId);
         if (enabled) {
             if (index == NAME_NOT_FOUND) {
-                mTorchClientMap.add(id, clientBinder);
+                mTorchClientMap.add(cameraId, clientBinder);
             } else {
                 mTorchClientMap.valueAt(index)->unlinkToDeath(this);
                 mTorchClientMap.replaceValueAt(index, clientBinder);
@@ -2692,21 +2779,20 @@
     }
 
     int clientPid = CameraThreadState::getCallingPid();
-    const char *id_cstr = id.c_str();
-    const char *torchState = enabled ? "on" : "off";
-    ALOGI("Torch for camera id %s turned %s for client PID %d", id_cstr, torchState, clientPid);
-    logTorchEvent(id_cstr, torchState , clientPid);
+    std::string torchState = enabled ? "on" : "off";
+    ALOGI("Torch for camera id %s turned %s for client PID %d", cameraId.c_str(),
+            torchState.c_str(), clientPid);
+    logTorchEvent(cameraId, torchState, clientPid);
     return Status::ok();
 }
 
-void CameraService::updateTorchUidMapLocked(const String16& cameraId, int uid) {
-    String8 id = String8(cameraId.string());
-    if (mTorchUidMap.find(id) == mTorchUidMap.end()) {
-        mTorchUidMap[id].first = uid;
-        mTorchUidMap[id].second = uid;
+void CameraService::updateTorchUidMapLocked(const std::string& cameraId, int uid) {
+    if (mTorchUidMap.find(cameraId) == mTorchUidMap.end()) {
+        mTorchUidMap[cameraId].first = uid;
+        mTorchUidMap[cameraId].second = uid;
     } else {
         // Set the pending UID
-        mTorchUidMap[id].first = uid;
+        mTorchUidMap[cameraId].first = uid;
     }
 }
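// Illustrative sketch, not part of this change: the pending/current semantics of the
// per-camera UID pair updated above, as a self-contained tracker. The .first slot holds
// the UID of the latest torch request (pending) and .second the UID whose request last
// took effect; on the first request both are the same. Class and method names are
// hypothetical.
#include <map>
#include <string>
#include <utility>

class TorchUidTracker {
public:
    void onTorchRequest(const std::string& cameraId, int uid) {
        auto it = mUids.find(cameraId);
        if (it == mUids.end()) {
            mUids[cameraId] = {uid, uid};  // first request: pending == current
        } else {
            it->second.first = uid;        // remember the pending requester
        }
    }
    void onTorchStatusChanged(const std::string& cameraId) {
        auto it = mUids.find(cameraId);
        if (it != mUids.end()) {
            it->second.second = it->second.first;  // promote pending to current
        }
    }
private:
    std::map<std::string, std::pair<int, int>> mUids;  // cameraId -> {pending, current}
};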
 
@@ -2719,7 +2805,7 @@
     if (pid != selfPid) {
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
-        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+        if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
             const int uid = CameraThreadState::getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about system"
                     " events from pid=%d, uid=%d", pid, uid);
@@ -2793,7 +2879,7 @@
     if (pid != selfPid) {
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
-        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+        if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
             const int uid = CameraThreadState::getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about device"
                     " state changes from pid=%d, uid=%d", pid, uid);
@@ -2824,7 +2910,7 @@
     if (callingPid != selfPid) {
         // Ensure we're being called by system_server, or similar process with
         // permissions to notify the camera service about system events
-        if (!checkCallingPermission(sCameraSendSystemEventsPermission)) {
+        if (!checkCallingPermission(toString16(sCameraSendSystemEventsPermission))) {
             const int uid = CameraThreadState::getCallingUid();
             ALOGE("Permission Denial: cannot send updates to camera service about orientation"
                     " changes from pid=%d, uid=%d", callingPid, uid);
@@ -2866,7 +2952,7 @@
 
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
-        logServiceError(String8::format("Camera subsystem is not available"),ERROR_DISCONNECTED);
+        logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED,
                 "Camera subsystem is not available");
     }
@@ -2878,8 +2964,7 @@
         std::vector<std::string> validCombination;
         for (auto &cameraId : combination) {
             // if the camera state is not present, skip
-            String8 cameraIdStr(cameraId.c_str());
-            auto state = getCameraState(cameraIdStr);
+            auto state = getCameraState(cameraId);
             if (state == nullptr) {
                 ALOGW("%s: camera id %s does not exist", __FUNCTION__, cameraId.c_str());
                 continue;
@@ -2888,7 +2973,7 @@
             if (status == StatusInternal::NOT_PRESENT || status == StatusInternal::ENUMERATING) {
                 continue;
             }
-            if (shouldRejectSystemCameraConnection(cameraIdStr)) {
+            if (shouldRejectSystemCameraConnection(cameraId)) {
                 continue;
             }
             validCombination.push_back(cameraId);
@@ -2917,13 +3002,11 @@
     // Check for camera permissions
     int callingPid = CameraThreadState::getCallingPid();
     int callingUid = CameraThreadState::getCallingUid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.pid = callingPid;
     attributionSource.uid = callingUid;
-    bool checkPermissionForCamera = permissionChecker.checkPermissionForPreflight(
-                sCameraPermission, attributionSource, String16(), AppOpsManager::OP_NONE)
-                != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+    bool checkPermissionForCamera = checkPermission(std::string(),
+                sCameraPermission, attributionSource, std::string(), AppOpsManager::OP_NONE);
     if ((callingPid != getpid()) && !checkPermissionForCamera) {
         ALOGE("%s: pid %d doesn't have camera permissions", __FUNCTION__, callingPid);
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
@@ -2936,7 +3019,7 @@
                     cameraIdsAndSessionConfigurations, mPerfClassPrimaryCameraIds,
                     targetSdkVersion, isSupported);
     if (res != OK) {
-        logServiceError(String8::format("Unable to query session configuration support"),
+        logServiceError("Unable to query session configuration support",
             ERROR_INVALID_OPERATION);
         return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to query session configuration "
                 "support %s (%d)", strerror(-res), res);
@@ -2971,13 +3054,13 @@
 
     auto clientUid = CameraThreadState::getCallingUid();
     auto clientPid = CameraThreadState::getCallingPid();
-    permission::PermissionChecker permissionChecker;
     AttributionSourceState attributionSource{};
     attributionSource.uid = clientUid;
     attributionSource.pid = clientPid;
-    bool openCloseCallbackAllowed = permissionChecker.checkPermissionForPreflight(
-            sCameraOpenCloseListenerPermission, attributionSource, String16(),
-            AppOpsManager::OP_NONE) != permission::PermissionChecker::PERMISSION_HARD_DENIED;
+
+    bool openCloseCallbackAllowed = checkPermission(std::string(),
+            sCameraOpenCloseListenerPermission, attributionSource, std::string(),
+            AppOpsManager::OP_NONE);
 
     Mutex::Autolock lock(mServiceLock);
 
@@ -2996,11 +3079,11 @@
                         openCloseCallbackAllowed);
         auto ret = serviceListener->initialize(isProcessLocalTest);
         if (ret != NO_ERROR) {
-            String8 msg = String8::format("Failed to initialize service listener: %s (%d)",
+            std::string msg = fmt::sprintf("Failed to initialize service listener: %s (%d)",
                     strerror(-ret), ret);
-            logServiceError(msg,ERROR_ILLEGAL_ARGUMENT);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+            logServiceError(msg, ERROR_ILLEGAL_ARGUMENT);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
         // The listener still needs to be added to the list of listeners, regardless of what
         // permissions the listener process has / whether it is a vendor listener. Since it might be
@@ -3015,7 +3098,7 @@
         for (auto& i : mCameraStates) {
             cameraStatuses->emplace_back(i.first,
                     mapToInterface(i.second->getStatus()), i.second->getUnavailablePhysicalIds(),
-                    openCloseCallbackAllowed ? i.second->getClientPackage() : String8::empty());
+                    openCloseCallbackAllowed ? i.second->getClientPackage() : std::string());
         }
     }
     // Remove the camera statuses that should be hidden from the client, we do
@@ -3034,9 +3117,9 @@
                             clientUid);}), cameraStatuses->end());
 
     //cameraStatuses will have non-eligible camera ids removed.
-    std::set<String16> idsChosenForCallback;
+    std::set<std::string> idsChosenForCallback;
     for (const auto &s : *cameraStatuses) {
-        idsChosenForCallback.insert(String16(s.cameraId));
+        idsChosenForCallback.insert(s.cameraId);
     }
 
     /*
@@ -3046,7 +3129,7 @@
     {
         Mutex::Autolock al(mTorchStatusMutex);
         for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
-            String16 id = String16(mTorchStatusMap.keyAt(i).string());
+            const std::string &id = mTorchStatusMap.keyAt(i);
             // The camera id is visible to the client. Fine to send torch
             // callback.
             if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
@@ -3088,7 +3171,7 @@
     return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Unregistered listener given to removeListener");
 }
 
-Status CameraService::getLegacyParameters(int cameraId, /*out*/String16* parameters) {
+Status CameraService::getLegacyParameters(int cameraId, /*out*/std::string* parameters) {
 
     ATRACE_CALL();
     ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId);
@@ -3107,46 +3190,44 @@
     }
 
     String8 shimParamsString8 = shimParams.flatten();
-    String16 shimParamsString16 = String16(shimParamsString8);
 
-    *parameters = shimParamsString16;
+    *parameters = toStdString(shimParamsString8);
 
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const String16& unresolvedCameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    String8 resolvedId;
+    std::string resolvedCameraId;
     if (apiVersion == API_VERSION_2) {
-        resolvedId = resolveCameraId(String8(unresolvedCameraId));
+        resolvedCameraId = resolveCameraId(unresolvedCameraId);
     } else { // if (apiVersion == API_VERSION_1)
         // We don't support remapping for API 1.
         // TODO(b/286287541): Also support remapping for API 1.
-        resolvedId = String8(unresolvedCameraId);
+        resolvedCameraId = unresolvedCameraId;
     }
+    const std::string cameraId = resolvedCameraId;
 
-    const String8 id = resolvedId;
-
-    ALOGV("%s: for camera ID = %s", __FUNCTION__, id.string());
+    ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
         case API_VERSION_1:
         case API_VERSION_2:
             break;
         default:
-            String8 msg = String8::format("Unknown API version %d", apiVersion);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+            std::string msg = fmt::sprintf("Unknown API version %d", apiVersion);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     int portraitRotation;
-    auto deviceVersionAndTransport = getDeviceVersion(id, false, &portraitRotation);
+    auto deviceVersionAndTransport = getDeviceVersion(cameraId, false, &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
-        String8 msg = String8::format("Unknown camera ID %s", id.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (deviceVersionAndTransport.second == IPCTransport::HIDL) {
         int deviceVersion = deviceVersionAndTransport.first;
@@ -3156,11 +3237,11 @@
             case CAMERA_DEVICE_API_VERSION_3_1:
                 if (apiVersion == API_VERSION_2) {
                     ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without "
-                            "shim", __FUNCTION__, id.string(), deviceVersion);
+                            "shim", __FUNCTION__, cameraId.c_str(), deviceVersion);
                     *isSupported = false;
                 } else { // if (apiVersion == API_VERSION_1) {
                     ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always "
-                            "supported", __FUNCTION__, id.string());
+                            "supported", __FUNCTION__, cameraId.c_str());
                     *isSupported = true;
                 }
                 break;
@@ -3171,14 +3252,14 @@
             case CAMERA_DEVICE_API_VERSION_3_6:
             case CAMERA_DEVICE_API_VERSION_3_7:
                 ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
-                        __FUNCTION__, id.string());
+                        __FUNCTION__, cameraId.c_str());
                 *isSupported = true;
                 break;
             default: {
-                String8 msg = String8::format("Unknown device version %x for device %s",
-                        deviceVersion, id.string());
-                ALOGE("%s: %s", __FUNCTION__, msg.string());
-                return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
+                std::string msg = fmt::sprintf("Unknown device version %x for device %s",
+                        deviceVersion, cameraId.c_str());
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.c_str());
             }
         }
     } else {
@@ -3187,27 +3268,25 @@
     return Status::ok();
 }
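
The hunks above and below route every String8/String16 conversion through the toString8/toString16/toStdString helpers from camera/StringUtils.h. A minimal sketch of the assumed helper shapes follows, for readers tracing the call sites; the real header in frameworks/av/camera/include/camera/StringUtils.h is authoritative and may differ:

    // Sketch only: assumed shapes of the StringUtils helpers used by this patch.
    #include <optional>
    #include <string>
    #include <utils/String8.h>
    #include <utils/String16.h>

    inline android::String8 toString8(const std::string& str) {
        return android::String8(str.c_str(), str.size());
    }
    inline android::String16 toString16(const std::string& str) {
        return android::String16(str.c_str(), str.size());
    }
    inline std::string toStdString(const android::String8& str) {
        return std::string(str.c_str(), str.size());
    }
    inline std::string toStdString(const android::String16& str) {
        return std::string(android::String8(str).c_str());
    }
    // Optional-propagating overload, matching the toString16(mClientFeatureId) call sites.
    inline std::optional<android::String16> toString16(const std::optional<std::string>& str) {
        return str ? std::optional<android::String16>(toString16(*str)) : std::nullopt;
    }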
 
-Status CameraService::isHiddenPhysicalCamera(const String16& unresolvedCameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
-
-    const String8 id = resolveCameraId(String8(unresolvedCameraId));
-
-    ALOGV("%s: for camera ID = %s", __FUNCTION__, id.string());
-    *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(id.string());
+    const std::string cameraId = resolveCameraId(unresolvedCameraId);
+    ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
+    *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
     return Status::ok();
 }
 
 Status CameraService::injectCamera(
-        const String16& packageName, const String16& internalCamId,
-        const String16& externalCamId,
+        const std::string& packageName, const std::string& internalCamId,
+        const std::string& externalCamId,
         const sp<ICameraInjectionCallback>& callback,
         /*out*/
         sp<ICameraInjectionSession>* cameraInjectionSession) {
     ATRACE_CALL();
 
-    if (!checkCallingPermission(sCameraInjectExternalCameraPermission)) {
+    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
         const int pid = CameraThreadState::getCallingPid();
         const int uid = CameraThreadState::getCallingUid();
         ALOGE("Permission Denial: can't inject camera pid=%d, uid=%d", pid, uid);
@@ -3218,13 +3297,13 @@
     ALOGV(
         "%s: Package name = %s, Internal camera ID = %s, External camera ID = "
         "%s",
-        __FUNCTION__, String8(packageName).string(),
-        String8(internalCamId).string(), String8(externalCamId).string());
+        __FUNCTION__, packageName.c_str(),
+        internalCamId.c_str(), externalCamId.c_str());
 
     {
         Mutex::Autolock lock(mInjectionParametersLock);
-        mInjectionInternalCamId = String8(internalCamId);
-        mInjectionExternalCamId = String8(externalCamId);
+        mInjectionInternalCamId = internalCamId;
+        mInjectionExternalCamId = externalCamId;
         mInjectionStatusListener->addListener(callback);
         *cameraInjectionSession = new CameraInjectionSession(this);
         status_t res = NO_ERROR;
@@ -3239,7 +3318,7 @@
             if(res != OK) {
                 return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
                         "No camera device with ID \"%s\" currently available",
-                        mInjectionExternalCamId.string());
+                        mInjectionExternalCamId.c_str());
             }
             res = clientSp->injectCamera(mInjectionExternalCamId, mCameraProviderManager);
             if(res != OK) {
@@ -3254,7 +3333,7 @@
 }
 
 Status CameraService::reportExtensionSessionStats(
-        const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/) {
+        const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/) {
     ALOGV("%s: reported %s", __FUNCTION__, stats.toString().c_str());
     *sessionKey = mCameraServiceProxyWrapper->updateExtensionStats(stats);
     return Status::ok();
@@ -3321,7 +3400,7 @@
 }
 
 std::shared_ptr<CameraService::CameraState> CameraService::getCameraState(
-        const String8& cameraId) const {
+        const std::string& cameraId) const {
     std::shared_ptr<CameraState> state;
     {
         Mutex::Autolock lock(mCameraStatesLock);
@@ -3333,12 +3412,12 @@
     return state;
 }
 
-sp<CameraService::BasicClient> CameraService::removeClientLocked(const String8& cameraId) {
+sp<CameraService::BasicClient> CameraService::removeClientLocked(const std::string& cameraId) {
     // Remove from active clients list
     auto clientDescriptorPtr = mActiveClientManager.remove(cameraId);
     if (clientDescriptorPtr == nullptr) {
         ALOGW("%s: Could not evict client, no client for camera ID %s", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
         return sp<BasicClient>{nullptr};
     }
 
@@ -3393,16 +3472,14 @@
 
         evicted.push_back(clientSp);
 
-        String8 curTime = getFormattedCurrentTime();
-
         ALOGE("Evicting conflicting client for camera ID %s due to user change",
-                i->getKey().string());
+                i->getKey().c_str());
 
         // Log the clients evicted
-        logEvent(String8::format("EVICT device %s client held by package %s (PID %"
+        logEvent(fmt::sprintf("EVICT device %s client held by package %s (PID %"
                 PRId32 ", score %" PRId32 ", state %" PRId32 ")\n   - Evicted due"
-                " to user switch.", i->getKey().string(),
-                String8{clientSp->getPackageName()}.string(),
+                " to user switch.", i->getKey().c_str(),
+                clientSp->getPackageName().c_str(),
                 i->getOwnerId(), i->getPriority().getScore(),
                 i->getPriority().getState()));
 
@@ -3425,12 +3502,12 @@
     mServiceLock.lock();
 }
 
-void CameraService::logEvent(const char* event) {
-    String8 curTime = getFormattedCurrentTime();
+void CameraService::logEvent(const std::string &event) {
+    std::string curTime = getFormattedCurrentTime();
     Mutex::Autolock l(mLogLock);
-    String8 msg = String8::format("%s : %s", curTime.string(), event);
+    std::string msg = curTime + " : " + event;
     // For service error events, print the msg only once.
-    if(!msg.contains("SERVICE ERROR")) {
+    if (msg.find("SERVICE ERROR") != std::string::npos) {
         mEventLog.add(msg);
     } else if(sServiceErrorEventSet.find(msg) == sServiceErrorEventSet.end()) {
         // Error event not added to the dumpsys log before
@@ -3439,77 +3516,78 @@
     }
 }
 
-void CameraService::logDisconnected(const char* cameraId, int clientPid,
-        const char* clientPackage) {
+void CameraService::logDisconnected(const std::string &cameraId, int clientPid,
+        const std::string &clientPackage) {
     // Log the client disconnected
-    logEvent(String8::format("DISCONNECT device %s client for package %s (PID %d)", cameraId,
-            clientPackage, clientPid));
+    logEvent(fmt::sprintf("DISCONNECT device %s client for package %s (PID %d)", cameraId.c_str(),
+            clientPackage.c_str(), clientPid));
 }
 
-void CameraService::logDisconnectedOffline(const char* cameraId, int clientPid,
-        const char* clientPackage) {
+void CameraService::logDisconnectedOffline(const std::string &cameraId, int clientPid,
+        const std::string &clientPackage) {
     // Log the offline client disconnected
-    logEvent(String8::format("DISCONNECT offline device %s client for package %s (PID %d)",
-                cameraId, clientPackage, clientPid));
+    logEvent(fmt::sprintf("DISCONNECT offline device %s client for package %s (PID %d)",
+            cameraId.c_str(), clientPackage.c_str(), clientPid));
 }
 
-void CameraService::logConnected(const char* cameraId, int clientPid,
-        const char* clientPackage) {
+void CameraService::logConnected(const std::string &cameraId, int clientPid,
+        const std::string &clientPackage) {
     // Log the client connected
-    logEvent(String8::format("CONNECT device %s client for package %s (PID %d)", cameraId,
-            clientPackage, clientPid));
+    logEvent(fmt::sprintf("CONNECT device %s client for package %s (PID %d)", cameraId.c_str(),
+            clientPackage.c_str(), clientPid));
 }
 
-void CameraService::logConnectedOffline(const char* cameraId, int clientPid,
-        const char* clientPackage) {
+void CameraService::logConnectedOffline(const std::string &cameraId, int clientPid,
+        const std::string &clientPackage) {
     // Log the offline client connected
-    logEvent(String8::format("CONNECT offline device %s client for package %s (PID %d)", cameraId,
-            clientPackage, clientPid));
+    logEvent(fmt::sprintf("CONNECT offline device %s client for package %s (PID %d)",
+            cameraId.c_str(), clientPackage.c_str(), clientPid));
 }
 
-void CameraService::logRejected(const char* cameraId, int clientPid,
-        const char* clientPackage, const char* reason) {
+void CameraService::logRejected(const std::string &cameraId, int clientPid,
+        const std::string &clientPackage, const std::string &reason) {
     // Log the client rejected
-    logEvent(String8::format("REJECT device %s client for package %s (PID %d), reason: (%s)",
-            cameraId, clientPackage, clientPid, reason));
+    logEvent(fmt::sprintf("REJECT device %s client for package %s (PID %d), reason: (%s)",
+            cameraId.c_str(), clientPackage.c_str(), clientPid, reason.c_str()));
 }
 
-void CameraService::logTorchEvent(const char* cameraId, const char *torchState, int clientPid) {
+void CameraService::logTorchEvent(const std::string &cameraId, const std::string &torchState,
+        int clientPid) {
     // Log torch event
-    logEvent(String8::format("Torch for camera id %s turned %s for client PID %d", cameraId,
-            torchState, clientPid));
+    logEvent(fmt::sprintf("Torch for camera id %s turned %s for client PID %d", cameraId.c_str(),
+            torchState.c_str(), clientPid));
 }
 
 void CameraService::logUserSwitch(const std::set<userid_t>& oldUserIds,
         const std::set<userid_t>& newUserIds) {
-    String8 newUsers = toString(newUserIds);
-    String8 oldUsers = toString(oldUserIds);
+    std::string newUsers = toString(newUserIds);
+    std::string oldUsers = toString(oldUserIds);
     if (oldUsers.size() == 0) {
         oldUsers = "<None>";
     }
     // Log the new and old users
-    logEvent(String8::format("USER_SWITCH previous allowed user IDs: %s, current allowed user IDs: %s",
-            oldUsers.string(), newUsers.string()));
+    logEvent(fmt::sprintf("USER_SWITCH previous allowed user IDs: %s, current allowed user IDs: %s",
+            oldUsers.c_str(), newUsers.c_str()));
 }
 
-void CameraService::logDeviceRemoved(const char* cameraId, const char* reason) {
+void CameraService::logDeviceRemoved(const std::string &cameraId, const std::string &reason) {
     // Log the device removal
-    logEvent(String8::format("REMOVE device %s, reason: (%s)", cameraId, reason));
+    logEvent(fmt::sprintf("REMOVE device %s, reason: (%s)", cameraId.c_str(), reason.c_str()));
 }
 
-void CameraService::logDeviceAdded(const char* cameraId, const char* reason) {
+void CameraService::logDeviceAdded(const std::string &cameraId, const std::string &reason) {
     // Log the device addition
-    logEvent(String8::format("ADD device %s, reason: (%s)", cameraId, reason));
+    logEvent(fmt::sprintf("ADD device %s, reason: (%s)", cameraId.c_str(), reason.c_str()));
 }
 
-void CameraService::logClientDied(int clientPid, const char* reason) {
+void CameraService::logClientDied(int clientPid, const std::string &reason) {
     // Log the client death
-    logEvent(String8::format("DIED client(s) with PID %d, reason: (%s)", clientPid, reason));
+    logEvent(fmt::sprintf("DIED client(s) with PID %d, reason: (%s)", clientPid, reason.c_str()));
 }
 
-void CameraService::logServiceError(const char* msg, int errorCode) {
-    String8 curTime = getFormattedCurrentTime();
-    logEvent(String8::format("SERVICE ERROR: %s : %d (%s)", msg, errorCode, strerror(-errorCode)));
+void CameraService::logServiceError(const std::string &msg, int errorCode) {
+    logEvent(fmt::sprintf("SERVICE ERROR: %s : %d (%s)", msg.c_str(), errorCode,
+            strerror(-errorCode)));
 }
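
A worked example of the std::string logging path above; the message and error code are hypothetical, not taken from the patch:

    // Illustrative only.
    logServiceError("Unable to initialize camera provider", -ENODEV);
    // logEvent() receives:
    //   "SERVICE ERROR: Unable to initialize camera provider : -19 (No such device)"
    // and, via the SERVICE ERROR branch in logEvent(), records
    //   "<formatted time> : SERVICE ERROR: Unable to initialize camera provider : -19 (No such device)"
    // at most once per identical timestamped message, keyed by sServiceErrorEventSet.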
 
 status_t CameraService::onTransact(uint32_t code, const Parcel& data, Parcel* reply,
@@ -3629,9 +3707,9 @@
 
 CameraService::Client::Client(const sp<CameraService>& cameraService,
         const sp<ICameraClient>& cameraClient,
-        const String16& clientPackageName, bool systemNativeClient,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraIdStr,
+        const std::string& clientPackageName, bool systemNativeClient,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
         int servicePid, bool overrideToPortrait) :
@@ -3667,8 +3745,8 @@
 
 CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
         const sp<IBinder>& remoteCallback,
-        const String16& clientPackageName, bool nativeClient,
-        const std::optional<String16>& clientFeatureId, const String8& cameraIdStr,
+        const std::string& clientPackageName, bool nativeClient,
+        const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, bool overrideToPortrait):
         mDestructionStarted(false),
@@ -3719,9 +3797,9 @@
     mDisconnected = true;
 
     sCameraService->removeByClient(this);
-    sCameraService->logDisconnected(mCameraIdStr, mClientPid, String8(mClientPackageName));
+    sCameraService->logDisconnected(mCameraIdStr, mClientPid, mClientPackageName);
     sCameraService->mCameraProviderManager->removeRef(CameraProviderManager::DeviceMode::CAMERA,
-            mCameraIdStr.c_str());
+            mCameraIdStr);
 
     sp<IBinder> remote = getRemote();
     if (remote != nullptr) {
@@ -3731,7 +3809,7 @@
     finishCameraOps();
     // Notify flashlight that a camera device is closed.
     sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
-    ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.string(),
+    ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.c_str(),
             mClientPid);
 
     // client shouldn't be able to call into us anymore
@@ -3755,7 +3833,7 @@
     return OK;
 }
 
-status_t CameraService::BasicClient::startWatchingTags(const String8&, int) {
+status_t CameraService::BasicClient::startWatchingTags(const std::string&, int) {
     // Can't watch tags directly, must go through CameraService::startWatchingTags
     return OK;
 }
@@ -3770,7 +3848,7 @@
     return OK;
 }
 
-String16 CameraService::BasicClient::getPackageName() const {
+std::string CameraService::BasicClient::getPackageName() const {
     return mClientPackageName;
 }
 
@@ -3827,7 +3905,7 @@
 status_t CameraService::BasicClient::handleAppOpMode(int32_t mode) {
     if (mode == AppOpsManager::MODE_ERRORED) {
         ALOGI("Camera %s: Access for \"%s\" has been revoked",
-                mCameraIdStr.string(), String8(mClientPackageName).string());
+                mCameraIdStr.c_str(), mClientPackageName.c_str());
         return PERMISSION_DENIED;
     } else if (!mUidIsTrusted && mode == AppOpsManager::MODE_IGNORED) {
         // If the calling Uid is trusted (a native service), the AppOpsManager could
@@ -3843,7 +3921,7 @@
         // capabilities are unknown.
         if (!isUidActive || !isCameraPrivacyEnabled) {
             ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
+                    mCameraIdStr.c_str(), mClientPackageName.c_str());
             // Return the same error as for device policy manager rejection
             return -EACCES;
         }
@@ -3856,18 +3934,18 @@
 
     {
         ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
-              __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+              __FUNCTION__, mClientPackageName.c_str(), mClientUid);
     }
     if (mAppOpsManager != nullptr) {
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
         mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
-                mClientPackageName, mOpsCallback);
+                toString16(mClientPackageName), mOpsCallback);
 
         // Just check for camera access here on open - delay startOp until
         // camera frames start streaming in startCameraStreamingOps
         int32_t mode = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName);
+                toString16(mClientPackageName));
         status_t res = handleAppOpMode(mode);
         if (res != OK) {
             return res;
@@ -3900,12 +3978,13 @@
     }
 
     ALOGV("%s: Start camera streaming ops, package name = %s, client UID = %d",
-            __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+            __FUNCTION__, mClientPackageName.c_str(), mClientUid);
 
     if (mAppOpsManager != nullptr) {
         int32_t mode = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName, /*startIfModeDefault*/ false, mClientFeatureId,
-                String16("start camera ") + String16(mCameraIdStr));
+                toString16(mClientPackageName), /*startIfModeDefault*/ false,
+                toString16(mClientFeatureId),
+                toString16("start camera ") + toString16(mCameraIdStr));
         status_t res = handleAppOpMode(mode);
         if (res != OK) {
             return res;
@@ -3921,14 +4000,14 @@
     ATRACE_CALL();
 
     ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d",
-            __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+            __FUNCTION__, mClientPackageName.c_str(), mClientUid);
 
     // noteAppOp is only used for when camera mute is not supported, in order
     // to trigger the sensor privacy "Unblock" dialog
     if (mAppOpsManager != nullptr) {
         int32_t mode = mAppOpsManager->noteOp(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName, mClientFeatureId,
-                String16("start camera ") + String16(mCameraIdStr));
+                toString16(mClientPackageName), toString16(mClientFeatureId),
+                toString16("start camera ") + toString16(mCameraIdStr));
         status_t res = handleAppOpMode(mode);
         if (res != OK) {
             return res;
@@ -3952,7 +4031,7 @@
 
     if (mAppOpsManager != nullptr) {
         mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
-                mClientPackageName, mClientFeatureId);
+                toString16(mClientPackageName), toString16(mClientFeatureId));
         mOpsStreaming = false;
     }
 
@@ -4008,7 +4087,7 @@
 
     int32_t res;
     res = mAppOpsManager->checkOp(AppOpsManager::OP_CAMERA,
-            mClientUid, mClientPackageName);
+            mClientUid, toString16(mClientPackageName));
     ALOGV("checkOp returns: %d, %s ", res,
             res == AppOpsManager::MODE_ALLOWED ? "ALLOWED" :
             res == AppOpsManager::MODE_IGNORED ? "IGNORED" :
@@ -4016,15 +4095,15 @@
             "UNKNOWN");
 
     if (res == AppOpsManager::MODE_ERRORED) {
-        ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.string(),
-              String8(mClientPackageName).string());
+        ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.c_str(),
+              mClientPackageName.c_str());
         block();
     } else if (res == AppOpsManager::MODE_IGNORED) {
         bool isUidActive = sCameraService->mUidPolicy->isUidActive(mClientUid, mClientPackageName);
         bool isCameraPrivacyEnabled =
                 sCameraService->mSensorPrivacyPolicy->isCameraPrivacyEnabled();
         ALOGI("Camera %s: Access for \"%s\" has been restricted, isUidTrusted %d, isUidActive %d",
-                mCameraIdStr.string(), String8(mClientPackageName).string(),
+                mCameraIdStr.c_str(), mClientPackageName.c_str(),
                 mUidIsTrusted, isUidActive);
         // If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
         // b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
@@ -4105,7 +4184,7 @@
             | ActivityManager::UID_OBSERVER_ACTIVE | ActivityManager::UID_OBSERVER_PROCSTATE
             | ActivityManager::UID_OBSERVER_PROC_OOM_ADJ,
             ActivityManager::PROCESS_STATE_UNKNOWN,
-            String16("cameraserver"), emptyUidArray, 0, mObserverToken);
+            toString16(kServiceName), emptyUidArray, 0, mObserverToken);
     if (res == OK) {
         mRegistered = true;
         ALOGV("UidPolicy: Registered with ActivityManager");
@@ -4115,7 +4194,7 @@
 }
 
 void CameraService::UidPolicy::onServiceRegistration(const String16& name, const sp<IBinder>&) {
-    if (name != String16(kActivityServiceName)) {
+    if (name != toString16(kActivityServiceName)) {
         return;
     }
 
@@ -4127,9 +4206,9 @@
     // If not available then register for notifications, instead of blocking
     // till the service is ready
     sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->checkService(String16(kActivityServiceName));
+    sp<IBinder> binder = sm->checkService(toString16(kActivityServiceName));
     if (!binder) {
-        sm->registerForNotifications(String16(kActivityServiceName), this);
+        sm->registerForNotifications(toString16(kActivityServiceName), this);
     } else {
         registerWithActivityManager();
     }
@@ -4248,7 +4327,7 @@
         monitoredUid.procAdj = resource_policy::UNKNOWN_ADJ;
         monitoredUid.refCount = 1;
         it = mMonitoredUids.emplace(std::pair<uid_t, MonitoredUid>(uid, monitoredUid)).first;
-        status_t res = mAm.addUidToObserver(mObserverToken, String16("cameraserver"), uid);
+        status_t res = mAm.addUidToObserver(mObserverToken, toString16(kServiceName), uid);
         if (res != OK) {
             ALOGE("UidPolicy: Failed to add uid to observer: 0x%08x", res);
         }
@@ -4269,7 +4348,7 @@
         it->second.refCount--;
         if (it->second.refCount == 0) {
             mMonitoredUids.erase(it);
-            status_t res = mAm.removeUidFromObserver(mObserverToken, String16("cameraserver"), uid);
+            status_t res = mAm.removeUidFromObserver(mObserverToken, toString16(kServiceName), uid);
             if (res != OK) {
                 ALOGE("UidPolicy: Failed to remove uid from observer: 0x%08x", res);
             }
@@ -4281,7 +4360,7 @@
     }
 }
 
-bool CameraService::UidPolicy::isUidActive(uid_t uid, String16 callingPackage) {
+bool CameraService::UidPolicy::isUidActive(uid_t uid, const std::string &callingPackage) {
     Mutex::Autolock _l(mUidLock);
     return isUidActiveLocked(uid, callingPackage);
 }
@@ -4289,7 +4368,7 @@
 static const int64_t kPollUidActiveTimeoutTotalMillis = 300;
 static const int64_t kPollUidActiveTimeoutMillis = 50;
 
-bool CameraService::UidPolicy::isUidActiveLocked(uid_t uid, String16 callingPackage) {
+bool CameraService::UidPolicy::isUidActiveLocked(uid_t uid, const std::string &callingPackage) {
     // Non-app UIDs are considered always active
     // If activity manager is unreachable, assume everything is active
     if (uid < FIRST_APPLICATION_UID || !mRegistered) {
@@ -4316,7 +4395,7 @@
             // some polling which should happen pretty rarely anyway as the race is hard
             // to hit.
             active = mActiveUids.find(uid) != mActiveUids.end();
-            if (!active) active = am.isUidActive(uid, callingPackage);
+            if (!active) active = am.isUidActive(uid, toString16(callingPackage));
             if (active) {
                 break;
             }
@@ -4356,12 +4435,12 @@
     return procState;
 }
 
-void CameraService::UidPolicy::UidPolicy::addOverrideUid(uid_t uid,
-        String16 callingPackage, bool active) {
+void CameraService::UidPolicy::addOverrideUid(uid_t uid,
+        const std::string &callingPackage, bool active) {
     updateOverrideUid(uid, callingPackage, active, true);
 }
 
-void CameraService::UidPolicy::removeOverrideUid(uid_t uid, String16 callingPackage) {
+void CameraService::UidPolicy::removeOverrideUid(uid_t uid, const std::string &callingPackage) {
     updateOverrideUid(uid, callingPackage, false, false);
 }
 
@@ -4372,7 +4451,7 @@
     mActiveUids.clear();
 }
 
-void CameraService::UidPolicy::updateOverrideUid(uid_t uid, String16 callingPackage,
+void CameraService::UidPolicy::updateOverrideUid(uid_t uid, const std::string &callingPackage,
         bool active, bool insert) {
     bool wasActive = false;
     bool isActive = false;
@@ -4415,7 +4494,7 @@
 
 void CameraService::SensorPrivacyPolicy::onServiceRegistration(const String16& name,
                                                                const sp<IBinder>&) {
-    if (name != String16(kSensorPrivacyServiceName)) {
+    if (name != toString16(kSensorPrivacyServiceName)) {
         return;
     }
 
@@ -4426,9 +4505,9 @@
     // Use checkservice to see if the sensor_privacy service is available
     // If service is not available then register for notification
     sp<IServiceManager> sm = defaultServiceManager();
-    sp<IBinder> binder = sm->checkService(String16(kSensorPrivacyServiceName));
+    sp<IBinder> binder = sm->checkService(toString16(kSensorPrivacyServiceName));
     if (!binder) {
-        sm->registerForNotifications(String16(kSensorPrivacyServiceName),this);
+        sm->registerForNotifications(toString16(kSensorPrivacyServiceName), this);
     } else {
         registerWithSensorPrivacyManager();
     }
@@ -4492,8 +4571,8 @@
 //                  CameraState
 // ----------------------------------------------------------------------------
 
-CameraService::CameraState::CameraState(const String8& id, int cost,
-        const std::set<String8>& conflicting, SystemCameraKind systemCameraKind,
+CameraService::CameraState::CameraState(const std::string& id, int cost,
+        const std::set<std::string>& conflicting, SystemCameraKind systemCameraKind,
         const std::vector<std::string>& physicalCameras) : mId(id),
         mStatus(StatusInternal::NOT_PRESENT), mCost(cost), mConflicting(conflicting),
         mSystemCameraKind(systemCameraKind), mPhysicalCameras(physicalCameras) {}
@@ -4505,9 +4584,9 @@
     return mStatus;
 }
 
-std::vector<String8> CameraService::CameraState::getUnavailablePhysicalIds() const {
+std::vector<std::string> CameraService::CameraState::getUnavailablePhysicalIds() const {
     Mutex::Autolock lock(mStatusLock);
-    std::vector<String8> res(mUnavailablePhysicalIds.begin(), mUnavailablePhysicalIds.end());
+    std::vector<std::string> res(mUnavailablePhysicalIds.begin(), mUnavailablePhysicalIds.end());
     return res;
 }
 
@@ -4523,14 +4602,10 @@
     return mCost;
 }
 
-std::set<String8> CameraService::CameraState::getConflicting() const {
+std::set<std::string> CameraService::CameraState::getConflicting() const {
     return mConflicting;
 }
 
-String8 CameraService::CameraState::getId() const {
-    return mId;
-}
-
 SystemCameraKind CameraService::CameraState::getSystemCameraKind() const {
     return mSystemCameraKind;
 }
@@ -4540,24 +4615,24 @@
             != mPhysicalCameras.end();
 }
 
-bool CameraService::CameraState::addUnavailablePhysicalId(const String8& physicalId) {
+bool CameraService::CameraState::addUnavailablePhysicalId(const std::string& physicalId) {
     Mutex::Autolock lock(mStatusLock);
     auto result = mUnavailablePhysicalIds.insert(physicalId);
     return result.second;
 }
 
-bool CameraService::CameraState::removeUnavailablePhysicalId(const String8& physicalId) {
+bool CameraService::CameraState::removeUnavailablePhysicalId(const std::string& physicalId) {
     Mutex::Autolock lock(mStatusLock);
     auto count = mUnavailablePhysicalIds.erase(physicalId);
     return count > 0;
 }
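
A small usage sketch of the unavailable-physical-ID bookkeeping above; the camera IDs, cost, and kind are hypothetical:

    CameraService::CameraState state("0", /*cost*/ 100, /*conflicting*/ {},
            SystemCameraKind::PUBLIC, /*physicalCameras*/ {"2", "3"});
    state.addUnavailablePhysicalId("2");      // true  -- newly marked unavailable
    state.addUnavailablePhysicalId("2");      // false -- already recorded (std::set::insert)
    state.getUnavailablePhysicalIds();        // {"2"}
    state.removeUnavailablePhysicalId("2");   // true  -- one entry erased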
 
-void CameraService::CameraState::setClientPackage(const String8& clientPackage) {
+void CameraService::CameraState::setClientPackage(const std::string& clientPackage) {
     Mutex::Autolock lock(mStatusLock);
     mClientPackage = clientPackage;
 }
 
-String8 CameraService::CameraState::getClientPackage() const {
+std::string CameraService::CameraState::getClientPackage() const {
     Mutex::Autolock lock(mStatusLock);
     return mClientPackage;
 }
@@ -4567,23 +4642,23 @@
 // ----------------------------------------------------------------------------
 
 void CameraService::ClientEventListener::onClientAdded(
-        const resource_policy::ClientDescriptor<String8,
+        const resource_policy::ClientDescriptor<std::string,
         sp<CameraService::BasicClient>>& descriptor) {
     const auto& basicClient = descriptor.getValue();
     if (basicClient.get() != nullptr) {
         BatteryNotifier& notifier(BatteryNotifier::getInstance());
-        notifier.noteStartCamera(descriptor.getKey(),
+        notifier.noteStartCamera(toString8(descriptor.getKey()),
                 static_cast<int>(basicClient->getClientUid()));
     }
 }
 
 void CameraService::ClientEventListener::onClientRemoved(
-        const resource_policy::ClientDescriptor<String8,
+        const resource_policy::ClientDescriptor<std::string,
         sp<CameraService::BasicClient>>& descriptor) {
     const auto& basicClient = descriptor.getValue();
     if (basicClient.get() != nullptr) {
         BatteryNotifier& notifier(BatteryNotifier::getInstance());
-        notifier.noteStopCamera(descriptor.getKey(),
+        notifier.noteStopCamera(toString8(descriptor.getKey()),
                 static_cast<int>(basicClient->getClientUid()));
     }
 }
@@ -4600,7 +4675,7 @@
 CameraService::CameraClientManager::~CameraClientManager() {}
 
 sp<CameraService::BasicClient> CameraService::CameraClientManager::getCameraClient(
-        const String8& id) const {
+        const std::string& id) const {
     auto descriptor = get(id);
     if (descriptor == nullptr) {
         return sp<BasicClient>{nullptr};
@@ -4608,56 +4683,57 @@
     return descriptor->getValue();
 }
 
-String8 CameraService::CameraClientManager::toString() const {
+std::string CameraService::CameraClientManager::toString() const {
     auto all = getAll();
-    String8 ret("[");
+    std::ostringstream ret;
+    ret << "[";
     bool hasAny = false;
     for (auto& i : all) {
         hasAny = true;
-        String8 key = i->getKey();
+        std::string key = i->getKey();
         int32_t cost = i->getCost();
         int32_t pid = i->getOwnerId();
         int32_t score = i->getPriority().getScore();
         int32_t state = i->getPriority().getState();
         auto conflicting = i->getConflicting();
         auto clientSp = i->getValue();
-        String8 packageName;
+        std::string packageName;
         userid_t clientUserId = 0;
         if (clientSp.get() != nullptr) {
-            packageName = String8{clientSp->getPackageName()};
+            packageName = clientSp->getPackageName();
             uid_t clientUid = clientSp->getClientUid();
             clientUserId = multiuser_get_user_id(clientUid);
         }
-        ret.appendFormat("\n(Camera ID: %s, Cost: %" PRId32 ", PID: %" PRId32 ", Score: %"
-                PRId32 ", State: %" PRId32, key.string(), cost, pid, score, state);
+        ret << fmt::sprintf("\n(Camera ID: %s, Cost: %" PRId32 ", PID: %" PRId32 ", Score: %"
+                PRId32 ", State: %" PRId32, key.c_str(), cost, pid, score, state);
 
         if (clientSp.get() != nullptr) {
-            ret.appendFormat("User Id: %d, ", clientUserId);
+            ret << fmt::sprintf("User Id: %d, ", clientUserId);
         }
         if (packageName.size() != 0) {
-            ret.appendFormat("Client Package Name: %s", packageName.string());
+            ret << fmt::sprintf("Client Package Name: %s", packageName.c_str());
         }
 
-        ret.append(", Conflicting Client Devices: {");
+        ret << ", Conflicting Client Devices: {";
         for (auto& j : conflicting) {
-            ret.appendFormat("%s, ", j.string());
+            ret << fmt::sprintf("%s, ", j.c_str());
         }
-        ret.append("})");
+        ret << "})";
     }
-    if (hasAny) ret.append("\n");
-    ret.append("]\n");
-    return ret;
+    if (hasAny) ret << "\n";
+    ret << "]\n";
+    return ret.str();
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
-        const String8& key, const sp<BasicClient>& value, int32_t cost,
-        const std::set<String8>& conflictingKeys, int32_t score, int32_t ownerId,
+        const std::string& key, const sp<BasicClient>& value, int32_t cost,
+        const std::set<std::string>& conflictingKeys, int32_t score, int32_t ownerId,
         int32_t state, int32_t oomScoreOffset, bool systemNativeClient) {
 
     int32_t score_adj = systemNativeClient ? kSystemNativeClientScore : score;
-    int32_t state_adj = systemNativeClient ? kSystemNativeClientState: state;
+    int32_t state_adj = systemNativeClient ? kSystemNativeClientState : state;
 
-    return std::make_shared<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>(
+    return std::make_shared<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>(
             key, value, cost, conflictingKeys, score_adj, ownerId, state_adj,
             systemNativeClient, oomScoreOffset);
 }
@@ -4696,7 +4772,7 @@
 }
 
 void CameraService::InjectionStatusListener::notifyInjectionError(
-        String8 injectedCamId, status_t err) {
+        const std::string &injectedCamId, status_t err) {
     if (mCameraInjectionCallback == nullptr) {
         ALOGW("InjectionStatusListener: mCameraInjectionCallback == nullptr");
         return;
@@ -4707,37 +4783,37 @@
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_SESSION);
             ALOGE("No camera device with ID \"%s\" currently available!",
-                    injectedCamId.string());
+                    injectedCamId.c_str());
             break;
         case -EBUSY:
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_SESSION);
             ALOGE("Higher-priority client using camera, ID \"%s\" currently unavailable!",
-                    injectedCamId.string());
+                    injectedCamId.c_str());
             break;
         case DEAD_OBJECT:
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_SESSION);
             ALOGE("Camera ID \"%s\" object is dead!",
-                    injectedCamId.string());
+                    injectedCamId.c_str());
             break;
         case INVALID_OPERATION:
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_SESSION);
             ALOGE("Camera ID \"%s\" encountered an operating or internal error!",
-                    injectedCamId.string());
+                    injectedCamId.c_str());
             break;
         case UNKNOWN_TRANSACTION:
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_UNSUPPORTED);
             ALOGE("Camera ID \"%s\" method doesn't support!",
-                    injectedCamId.string());
+                    injectedCamId.c_str());
             break;
         default:
             mCameraInjectionCallback->onInjectionError(
                     ICameraInjectionCallback::ERROR_INJECTION_INVALID_ERROR);
             ALOGE("Unexpected error %s (%d) opening camera \"%s\"!",
-                    strerror(-err), err, injectedCamId.string());
+                    strerror(-err), err, injectedCamId.c_str());
     }
 }
 
@@ -4813,10 +4889,10 @@
         Mutex::Autolock l(mCameraStatesLock);
         // Start collecting the info for open sessions and store it in temp file.
         for (const auto& state : mCameraStates) {
-            String8 cameraId = state.first;
+            std::string cameraId = state.first;
             auto clientDescriptor = mActiveClientManager.get(cameraId);
             if (clientDescriptor != nullptr) {
-                dprintf(mMemFd, "== Camera device %s dynamic info: ==\n", cameraId.string());
+                dprintf(mMemFd, "== Camera device %s dynamic info: ==\n", cameraId.c_str());
                 // Log the current open session info before device is disconnected.
                 dumpOpenSessionClientLogs(mMemFd, args, cameraId);
             }
@@ -4827,7 +4903,7 @@
 status_t CameraService::dump(int fd, const Vector<String16>& args) {
     ATRACE_CALL();
 
-    if (checkCallingPermission(sDumpPermission) == false) {
+    if (checkCallingPermission(toString16(sDumpPermission)) == false) {
         dprintf(fd, "Permission Denial: can't dump CameraService from pid=%d, uid=%d\n",
                 CameraThreadState::getCallingPid(),
                 CameraThreadState::getCallingUid());
@@ -4856,9 +4932,9 @@
     for (size_t i = 0; i < mNormalDeviceIds.size(); i++) {
         dprintf(fd, "    Device %zu maps to \"%s\"\n", i, mNormalDeviceIds[i].c_str());
     }
-    String8 activeClientString = mActiveClientManager.toString();
-    dprintf(fd, "Active Camera Clients:\n%s", activeClientString.string());
-    dprintf(fd, "Allowed user IDs: %s\n", toString(mAllowedUsers).string());
+    std::string activeClientString = mActiveClientManager.toString();
+    dprintf(fd, "Active Camera Clients:\n%s", activeClientString.c_str());
+    dprintf(fd, "Allowed user IDs: %s\n", toString(mAllowedUsers).c_str());
     if (mStreamUseCaseOverrides.size() > 0) {
         dprintf(fd, "Active stream use case overrides:");
         for (int64_t useCaseOverride : mStreamUseCaseOverrides) {
@@ -4876,18 +4952,18 @@
 
     int argSize = args.size();
     for (int i = 0; i < argSize; i++) {
-        if (args[i] == TagMonitor::kMonitorOption) {
+        if (args[i] == toString16(TagMonitor::kMonitorOption)) {
             if (i + 1 < argSize) {
-                mMonitorTags = String8(args[i + 1]);
+                mMonitorTags = toStdString(args[i + 1]);
             }
             break;
         }
     }
 
     for (auto& state : mCameraStates) {
-        String8 cameraId = state.first;
+        const std::string &cameraId = state.first;
 
-        dprintf(fd, "== Camera device %s dynamic info: ==\n", cameraId.string());
+        dprintf(fd, "== Camera device %s dynamic info: ==\n", cameraId.c_str());
 
         CameraParameters p = state.second->getShimParams();
         if (!p.isEmpty()) {
@@ -4938,8 +5014,8 @@
         if (args[i] == verboseOption) {
             // change logging level
             if (i + 1 >= n) continue;
-            String8 levelStr(args[i+1]);
-            int level = atoi(levelStr.string());
+            std::string levelStr = toStdString(args[i+1]);
+            int level = atoi(levelStr.c_str());
             dprintf(fd, "\nSetting log level to %d.\n", level);
             setLogLevel(level);
         } else if (args[i] == unreachableOption) {
@@ -4985,11 +5061,11 @@
 }
 
 void CameraService::dumpOpenSessionClientLogs(int fd,
-        const Vector<String16>& args, const String8& cameraId) {
+        const Vector<String16>& args, const std::string& cameraId) {
     auto clientDescriptor = mActiveClientManager.get(cameraId);
     dprintf(fd, "  %s : Device %s is open. Client instance dump:\n",
-            getFormattedCurrentTime().string(),
-            cameraId.string());
+            getFormattedCurrentTime().c_str(),
+            cameraId.c_str());
     dprintf(fd, "    Client priority score: %d state: %d\n",
         clientDescriptor->getPriority().getScore(),
         clientDescriptor->getPriority().getState());
@@ -4997,14 +5073,14 @@
 
     auto client = clientDescriptor->getValue();
     dprintf(fd, "    Client package: %s\n",
-        String8(client->getPackageName()).string());
+        client->getPackageName().c_str());
 
     client->dumpClient(fd, args);
 }
 
-void CameraService::dumpClosedSessionClientLogs(int fd, const String8& cameraId) {
+void CameraService::dumpClosedSessionClientLogs(int fd, const std::string& cameraId) {
     dprintf(fd, "  Device %s is closed, no client instance\n",
-                    cameraId.string());
+                    cameraId.c_str());
 }
 
 void CameraService::dumpEventLog(int fd) {
@@ -5012,7 +5088,7 @@
 
     Mutex::Autolock l(mLogLock);
     for (const auto& msg : mEventLog) {
-        dprintf(fd, "  %s\n", msg.string());
+        dprintf(fd, "  %s\n", msg.c_str());
     }
 
     if (mEventLog.size() == DEFAULT_EVENT_LOG_LENGTH) {
@@ -5023,7 +5099,7 @@
     dprintf(fd, "\n");
 }
 
-void CameraService::cacheClientTagDumpIfNeeded(const char *cameraId, BasicClient* client) {
+void CameraService::cacheClientTagDumpIfNeeded(const std::string &cameraId, BasicClient* client) {
     Mutex::Autolock lock(mLogLock);
     if (!isClientWatchedLocked(client)) { return; }
 
@@ -5032,34 +5108,26 @@
 
     if (dumpVector.empty()) { return; }
 
-    std::string dumpString;
+    std::ostringstream dumpString;
 
-    String8 currentTime = getFormattedCurrentTime();
-    dumpString += "Cached @ ";
-    dumpString += currentTime.string();
-    dumpString += "\n"; // First line is the timestamp of when client is cached.
-
-
-    const String16 &packageName = client->getPackageName();
-
-    String8 packageName8 = String8(packageName);
-    const char *printablePackageName = packageName8.lockBuffer(packageName.size());
-
+    std::string currentTime = getFormattedCurrentTime();
+    dumpString << "Cached @ ";
+    dumpString << currentTime;
+    dumpString << "\n"; // First line is the timestamp of when client is cached.
 
     size_t i = dumpVector.size();
 
     // Store the string in reverse order (latest last)
     while (i > 0) {
          i--;
-         dumpString += cameraId;
-         dumpString += ":";
-         dumpString += printablePackageName;
-         dumpString += "  ";
-         dumpString += dumpVector[i]; // implicitly ends with '\n'
+         dumpString << cameraId;
+         dumpString << ":";
+         dumpString << client->getPackageName();
+         dumpString << "  ";
+         dumpString << dumpVector[i]; // implicitly ends with '\n'
     }
 
-    packageName8.unlockBuffer();
-    mWatchedClientsDumpCache[packageName] = dumpString;
+    mWatchedClientsDumpCache[client->getPackageName()] = dumpString.str();
 }
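
For reference, the string cached above has the following shape; the timestamp and tag-dump lines are placeholders, not real output:

    Cached @ <formatted current time>
    <cameraId>:<package name>  <oldest monitored tag dump line>
    ...
    <cameraId>:<package name>  <most recent monitored tag dump line>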
 
 void CameraService::handleTorchClientBinderDied(const wp<IBinder> &who) {
@@ -5067,7 +5135,7 @@
     for (size_t i = 0; i < mTorchClientMap.size(); i++) {
         if (mTorchClientMap[i] == who) {
             // turn off the torch mode that was turned on by dead client
-            String8 cameraId = mTorchClientMap.keyAt(i);
+            std::string cameraId = mTorchClientMap.keyAt(i);
             status_t res = mFlashlight->setTorchMode(cameraId, false);
             if (res) {
                 ALOGE("%s: torch client died but couldn't turn off torch: "
@@ -5087,7 +5155,7 @@
       * binder driver
       */
     // PID here is approximate and can be wrong.
-    logClientDied(CameraThreadState::getCallingPid(), String8("Binder died unexpectedly"));
+    logClientDied(CameraThreadState::getCallingPid(), "Binder died unexpectedly");
 
     // check torch client
     handleTorchClientBinderDied(who);
@@ -5102,11 +5170,11 @@
             __FUNCTION__);
 }
 
-void CameraService::updateStatus(StatusInternal status, const String8& cameraId) {
+void CameraService::updateStatus(StatusInternal status, const std::string& cameraId) {
     updateStatus(status, cameraId, {});
 }
 
-void CameraService::updateStatus(StatusInternal status, const String8& cameraId,
+void CameraService::updateStatus(StatusInternal status, const std::string& cameraId,
         std::initializer_list<StatusInternal> rejectSourceStates) {
     // Do not lock mServiceLock here or can get into a deadlock from
     // connect() -> disconnect -> updateStatus
@@ -5115,14 +5183,14 @@
 
     if (state == nullptr) {
         ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
         return;
     }
 
     // Avoid calling getSystemCameraKind() with mStatusListenerLock held (b/141756275)
     SystemCameraKind deviceKind = SystemCameraKind::PUBLIC;
     if (getSystemCameraKind(cameraId, &deviceKind) != OK) {
-        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.string());
+        ALOGE("%s: Invalid camera id %s, skipping", __FUNCTION__, cameraId.c_str());
         return;
     }
 
@@ -5133,7 +5201,8 @@
     // of the listeners with both the mStatusLock and mStatusListenerLock held
     state->updateStatus(status, cameraId, rejectSourceStates, [this, &deviceKind,
                         &logicalCameraIds]
-            (const String8& cameraId, StatusInternal status) {
+            (const std::string& cameraId, StatusInternal status) {
+
             if (status != StatusInternal::ENUMERATING) {
                 // Update torch status if it has a flash unit.
                 Mutex::Autolock al(mTorchStatusMutex);
@@ -5151,7 +5220,7 @@
             }
 
             Mutex::Autolock lock(mStatusListenerLock);
-            notifyPhysicalCameraStatusLocked(mapToInterface(status), String16(cameraId),
+            notifyPhysicalCameraStatusLocked(mapToInterface(status), cameraId,
                     logicalCameraIds, deviceKind);
 
             for (auto& listener : mListenerList) {
@@ -5164,18 +5233,18 @@
                     continue;
                 }
                 auto ret = listener->getListener()->onStatusChanged(mapToInterface(status),
-                        String16(cameraId));
+                        cameraId);
                 listener->handleBinderStatus(ret,
                          "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                         __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                         ret.exceptionCode());
                 // Also trigger the callbacks for cameras that were remapped to the current
                 // cameraId for the specific package that this listener belongs to.
-                std::vector<String8> remappedCameraIds =
+                std::vector<std::string> remappedCameraIds =
                         findOriginalIdsForRemappedCameraId(cameraId, listener->getListenerUid());
                 for (auto& remappedCameraId : remappedCameraIds) {
                     ret = listener->getListener()->onStatusChanged(
-                            mapToInterface(status), String16(remappedCameraId));
+                            mapToInterface(status), remappedCameraId);
                     listener->handleBinderStatus(ret,
                              "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                             __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
@@ -5185,18 +5254,18 @@
         });
 }
 
-void CameraService::updateOpenCloseStatus(const String8& cameraId, bool open,
-        const String16& clientPackageName) {
+void CameraService::updateOpenCloseStatus(const std::string& cameraId, bool open,
+        const std::string& clientPackageName) {
     auto state = getCameraState(cameraId);
     if (state == nullptr) {
         ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__,
-                cameraId.string());
+                cameraId.c_str());
         return;
     }
     if (open) {
-        state->setClientPackage(String8(clientPackageName));
+        state->setClientPackage(clientPackageName);
     } else {
-        state->setClientPackage(String8::empty());
+        state->setClientPackage(std::string());
     }
 
     Mutex::Autolock lock(mStatusListenerLock);
@@ -5207,11 +5276,10 @@
         }
 
         binder::Status ret;
-        String16 cameraId64(cameraId);
         if (open) {
-            ret = it->getListener()->onCameraOpened(cameraId64, clientPackageName);
+            ret = it->getListener()->onCameraOpened(cameraId, clientPackageName);
         } else {
-            ret = it->getListener()->onCameraClosed(cameraId64);
+            ret = it->getListener()->onCameraClosed(cameraId);
         }
 
         it->handleBinderStatus(ret,
@@ -5222,7 +5290,7 @@
 
 template<class Func>
 void CameraService::CameraState::updateStatus(StatusInternal status,
-        const String8& cameraId,
+        const std::string& cameraId,
         std::initializer_list<StatusInternal> rejectSourceStates,
         Func onStatusUpdatedLocked) {
     Mutex::Autolock lock(mStatusLock);
@@ -5234,7 +5302,7 @@
     }
 
     ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__,
-            cameraId.string(), oldStatus, status);
+            cameraId.c_str(), oldStatus, status);
 
     if (oldStatus == StatusInternal::NOT_PRESENT &&
             (status != StatusInternal::PRESENT &&
@@ -5254,7 +5322,7 @@
     for (auto& rejectStatus : rejectSourceStates) {
         if (oldStatus == rejectStatus) {
             ALOGV("%s: Rejecting status transition for Camera ID %s,  since the source "
-                    "state was was in one of the bad states.", __FUNCTION__, cameraId.string());
+                    "state was was in one of the bad states.", __FUNCTION__, cameraId.c_str());
             mStatus = oldStatus;
             return;
         }
@@ -5264,7 +5332,7 @@
 }
 
 status_t CameraService::getTorchStatusLocked(
-        const String8& cameraId,
+        const std::string& cameraId,
         TorchModeStatus *status) const {
     if (!status) {
         return BAD_VALUE;
@@ -5279,7 +5347,7 @@
     return OK;
 }
 
-status_t CameraService::setTorchStatusLocked(const String8& cameraId,
+status_t CameraService::setTorchStatusLocked(const std::string& cameraId,
         TorchModeStatus status) {
     ssize_t index = mTorchStatusMap.indexOfKey(cameraId);
     if (index == NAME_NOT_FOUND) {
@@ -5290,20 +5358,20 @@
     return OK;
 }
 
-std::list<String16> CameraService::getLogicalCameras(
-        const String8& physicalCameraId) {
-    std::list<String16> retList;
+std::list<std::string> CameraService::getLogicalCameras(
+        const std::string& physicalCameraId) {
+    std::list<std::string> retList;
     Mutex::Autolock lock(mCameraStatesLock);
     for (const auto& state : mCameraStates) {
-        if (state.second->containsPhysicalCamera(physicalCameraId.c_str())) {
-            retList.emplace_back(String16(state.first));
+        if (state.second->containsPhysicalCamera(physicalCameraId)) {
+            retList.emplace_back(state.first);
         }
     }
     return retList;
 }
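
Continuing the hypothetical IDs from the CameraState sketch above: if logical camera "0" lists physical cameras "2" and "3", then

    getLogicalCameras("2");   // -> {"0"}
    getLogicalCameras("9");   // -> {}  (no logical camera contains it)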
 
 void CameraService::notifyPhysicalCameraStatusLocked(int32_t status,
-        const String16& physicalCameraId, const std::list<String16>& logicalCameraIds,
+        const std::string& physicalCameraId, const std::list<std::string>& logicalCameraIds,
         SystemCameraKind deviceKind) {
     // mStatusListenerLock is expected to be locked
     for (const auto& logicalCameraId : logicalCameraIds) {
@@ -5314,7 +5382,7 @@
             if (shouldSkipStatusUpdates(deviceKind, listener->isVendorListener(),
                     listener->getListenerPid(), listener->getListenerUid())) {
                 ALOGV("Skipping discovery callback for system-only camera device %s",
-                        String8(physicalCameraId).c_str());
+                        physicalCameraId.c_str());
                 continue;
             }
             auto ret = listener->getListener()->onPhysicalCameraStatusChanged(status,
@@ -5354,46 +5422,46 @@
 
 // NOTE: This is a remote API - make sure all args are validated
 status_t CameraService::shellCommand(int in, int out, int err, const Vector<String16>& args) {
-    if (!checkCallingPermission(sManageCameraPermission, nullptr, nullptr)) {
+    if (!checkCallingPermission(toString16(sManageCameraPermission), nullptr, nullptr)) {
         return PERMISSION_DENIED;
     }
     if (in == BAD_TYPE || out == BAD_TYPE || err == BAD_TYPE) {
         return BAD_VALUE;
     }
-    if (args.size() >= 3 && args[0] == String16("set-uid-state")) {
+    if (args.size() >= 3 && args[0] == toString16("set-uid-state")) {
         return handleSetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("reset-uid-state")) {
+    } else if (args.size() >= 2 && args[0] == toString16("reset-uid-state")) {
         return handleResetUidState(args, err);
-    } else if (args.size() >= 2 && args[0] == String16("get-uid-state")) {
+    } else if (args.size() >= 2 && args[0] == toString16("get-uid-state")) {
         return handleGetUidState(args, out, err);
-    } else if (args.size() >= 2 && args[0] == String16("set-rotate-and-crop")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-rotate-and-crop")) {
         return handleSetRotateAndCrop(args);
-    } else if (args.size() >= 1 && args[0] == String16("get-rotate-and-crop")) {
+    } else if (args.size() >= 1 && args[0] == toString16("get-rotate-and-crop")) {
         return handleGetRotateAndCrop(out);
-    } else if (args.size() >= 2 && args[0] == String16("set-autoframing")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-autoframing")) {
         return handleSetAutoframing(args);
-    } else if (args.size() >= 1 && args[0] == String16("get-autoframing")) {
+    } else if (args.size() >= 1 && args[0] == toString16("get-autoframing")) {
         return handleGetAutoframing(out);
-    } else if (args.size() >= 2 && args[0] == String16("set-image-dump-mask")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-image-dump-mask")) {
         return handleSetImageDumpMask(args);
-    } else if (args.size() >= 1 && args[0] == String16("get-image-dump-mask")) {
+    } else if (args.size() >= 1 && args[0] == toString16("get-image-dump-mask")) {
         return handleGetImageDumpMask(out);
-    } else if (args.size() >= 2 && args[0] == String16("set-camera-mute")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-camera-mute")) {
         return handleSetCameraMute(args);
-    } else if (args.size() >= 2 && args[0] == String16("set-stream-use-case-override")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-stream-use-case-override")) {
         return handleSetStreamUseCaseOverrides(args);
-    } else if (args.size() >= 1 && args[0] == String16("clear-stream-use-case-override")) {
+    } else if (args.size() >= 1 && args[0] == toString16("clear-stream-use-case-override")) {
         handleClearStreamUseCaseOverrides();
         return OK;
-    } else if (args.size() >= 1 && args[0] == String16("set-zoom-override")) {
+    } else if (args.size() >= 1 && args[0] == toString16("set-zoom-override")) {
         return handleSetZoomOverride(args);
-    } else if (args.size() >= 2 && args[0] == String16("watch")) {
+    } else if (args.size() >= 2 && args[0] == toString16("watch")) {
         return handleWatchCommand(args, in, out);
-    } else if (args.size() >= 2 && args[0] == String16("set-watchdog")) {
+    } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
-    } else if (args.size() >= 4 && args[0] == String16("remap-camera-id")) {
+    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
         return handleCameraIdRemapping(args, err);
-    } else if (args.size() == 1 && args[0] == String16("help")) {
+    } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
     }
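
The dispatch above now compares each incoming Vector<String16> shell argument against toString16(...) of a plain literal instead of constructing String16 temporaries inline. The helpers come from camera/StringUtils.h; their definitions are not part of this hunk, but a minimal sketch along the following lines would suffice, using only the libutils conversion constructors (the bodies below are an assumption, not the patch's actual implementation):

// Hypothetical sketch of the string conversion helpers referenced above.
// The real definitions live in camera/StringUtils.h and may differ.
#include <string>
#include <utils/String8.h>
#include <utils/String16.h>

namespace android {

// UTF-8 std::string -> UTF-16 String16 (String16 converts from a UTF-8 char*).
inline String16 toString16(const std::string& s) {
    return String16(s.c_str());
}

// UTF-16 String16 -> UTF-8 std::string, going through String8's converting
// constructor to perform the UTF-16 -> UTF-8 step.
inline std::string toStdString(const String16& s) {
    return std::string(String8(s).c_str());
}

}  // namespace android

With helpers like these, args[0] == toString16("set-uid-state") still compares two String16 values, so command parsing behaves as before; only the literal-to-String16 conversion is routed through the shared utility.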
@@ -5411,27 +5479,27 @@
         dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
         return BAD_VALUE;
     }
-    String16 packageName = args[1];
-    String8 cameraIdToReplace = String8(args[2]);
-    String8 cameraIdNew = String8(args[3]);
+    std::string packageName = toStdString(args[1]);
+    std::string cameraIdToReplace = toStdString(args[2]);
+    std::string cameraIdNew = toStdString(args[3]);
     remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
     return OK;
 }
 
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
-    String16 packageName = args[1];
+    std::string packageName = toStdString(args[1]);
 
     bool active = false;
-    if (args[2] == String16("active")) {
+    if (args[2] == toString16("active")) {
         active = true;
-    } else if ((args[2] != String16("idle"))) {
-        ALOGE("Expected active or idle but got: '%s'", String8(args[2]).string());
+    } else if ((args[2] != toString16("idle"))) {
+        ALOGE("Expected active or idle but got: '%s'", toStdString(args[2]).c_str());
         return BAD_VALUE;
     }
 
     int userId = 0;
-    if (args.size() >= 5 && args[3] == String16("--user")) {
-        userId = atoi(String8(args[4]));
+    if (args.size() >= 5 && args[3] == toString16("--user")) {
+        userId = atoi(toStdString(args[4]).c_str());
     }
 
     uid_t uid;
@@ -5444,11 +5512,11 @@
 }
 
 status_t CameraService::handleResetUidState(const Vector<String16>& args, int err) {
-    String16 packageName = args[1];
+    std::string packageName = toStdString(args[1]);
 
     int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
+    if (args.size() >= 4 && args[2] == toString16("--user")) {
+        userId = atoi(toStdString(args[3]).c_str());
     }
 
     uid_t uid;
@@ -5461,11 +5529,11 @@
 }
 
 status_t CameraService::handleGetUidState(const Vector<String16>& args, int out, int err) {
-    String16 packageName = args[1];
+    std::string packageName = toStdString(args[1]);
 
     int userId = 0;
-    if (args.size() >= 4 && args[2] == String16("--user")) {
-        userId = atoi(String8(args[3]));
+    if (args.size() >= 4 && args[2] == toString16("--user")) {
+        userId = atoi(toStdString(args[3]).c_str());
     }
 
     uid_t uid;
@@ -5481,7 +5549,7 @@
 }
 
 status_t CameraService::handleSetRotateAndCrop(const Vector<String16>& args) {
-    int rotateValue = atoi(String8(args[1]));
+    int rotateValue = atoi(toStdString(args[1]).c_str());
     if (rotateValue < ANDROID_SCALER_ROTATE_AND_CROP_NONE ||
             rotateValue > ANDROID_SCALER_ROTATE_AND_CROP_AUTO) return BAD_VALUE;
     Mutex::Autolock lock(mServiceLock);
@@ -5505,7 +5573,7 @@
 
 status_t CameraService::handleSetAutoframing(const Vector<String16>& args) {
     char* end;
-    int autoframingValue = (int) strtol(String8(args[1]), &end, /*base=*/10);
+    int autoframingValue = (int) strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
     if ((*end != '\0') ||
             (autoframingValue != ANDROID_CONTROL_AUTOFRAMING_OFF &&
              autoframingValue != ANDROID_CONTROL_AUTOFRAMING_ON &&
@@ -5532,7 +5600,7 @@
 }
 
 status_t CameraService::handleSetCameraServiceWatchdog(const Vector<String16>& args) {
-    int enableWatchdog = atoi(String8(args[1]));
+    int enableWatchdog = atoi(toStdString(args[1]).c_str());
 
     if (enableWatchdog < 0 || enableWatchdog > 1) return BAD_VALUE;
 
@@ -5568,11 +5636,11 @@
 status_t CameraService::handleSetImageDumpMask(const Vector<String16>& args) {
     char *endPtr;
     errno = 0;
-    String8 maskString8 = String8(args[1]);
-    long maskValue = strtol(maskString8.c_str(), &endPtr, 10);
+    std::string maskString = toStdString(args[1]);
+    long maskValue = strtol(maskString.c_str(), &endPtr, 10);
 
     if (errno != 0) return BAD_VALUE;
-    if (endPtr != maskString8.c_str() + maskString8.size()) return BAD_VALUE;
+    if (endPtr != maskString.c_str() + maskString.size()) return BAD_VALUE;
     if (maskValue < 0 || maskValue > 1) return BAD_VALUE;
 
     Mutex::Autolock lock(mServiceLock);
@@ -5589,7 +5657,7 @@
 }
 
 status_t CameraService::handleSetCameraMute(const Vector<String16>& args) {
-    int muteValue = strtol(String8(args[1]), nullptr, 10);
+    int muteValue = strtol(toStdString(args[1]).c_str(), nullptr, 10);
     if (errno != 0) return BAD_VALUE;
 
     if (muteValue < 0 || muteValue > 1) return BAD_VALUE;
@@ -5616,23 +5684,23 @@
     std::vector<int64_t> useCasesOverride;
     for (size_t i = 1; i < args.size(); i++) {
         int64_t useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
-        String8 arg8 = String8(args[i]);
-        if (arg8 == "DEFAULT") {
+        std::string arg = toStdString(args[i]);
+        if (arg == "DEFAULT") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
-        } else if (arg8 == "PREVIEW") {
+        } else if (arg == "PREVIEW") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW;
-        } else if (arg8 == "STILL_CAPTURE") {
+        } else if (arg == "STILL_CAPTURE") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE;
-        } else if (arg8 == "VIDEO_RECORD") {
+        } else if (arg == "VIDEO_RECORD") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD;
-        } else if (arg8 == "PREVIEW_VIDEO_STILL") {
+        } else if (arg == "PREVIEW_VIDEO_STILL") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
-        } else if (arg8 == "VIDEO_CALL") {
+        } else if (arg == "VIDEO_CALL") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
-        } else if (arg8 == "CROPPED_RAW") {
+        } else if (arg == "CROPPED_RAW") {
             useCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
         } else {
-            ALOGE("%s: Invalid stream use case %s", __FUNCTION__, arg8.c_str());
+            ALOGE("%s: Invalid stream use case %s", __FUNCTION__, arg.c_str());
             return BAD_VALUE;
         }
         useCasesOverride.push_back(useCase);
@@ -5651,7 +5719,7 @@
 
 status_t CameraService::handleSetZoomOverride(const Vector<String16>& args) {
     char* end;
-    int zoomOverrideValue = strtol(String8(args[1]), &end, /*base=*/10);
+    int zoomOverrideValue = strtol(toStdString(args[1]).c_str(), &end, /*base=*/10);
     if ((*end != '\0') ||
             (zoomOverrideValue != -1 &&
              zoomOverrideValue != ANDROID_CONTROL_SETTINGS_OVERRIDE_OFF &&
@@ -5678,15 +5746,15 @@
 }
 
 status_t CameraService::handleWatchCommand(const Vector<String16>& args, int inFd, int outFd) {
-    if (args.size() >= 3 && args[1] == String16("start")) {
+    if (args.size() >= 3 && args[1] == toString16("start")) {
         return startWatchingTags(args, outFd);
-    } else if (args.size() == 2 && args[1] == String16("stop")) {
+    } else if (args.size() == 2 && args[1] == toString16("stop")) {
         return stopWatchingTags(outFd);
-    } else if (args.size() == 2 && args[1] == String16("dump")) {
+    } else if (args.size() == 2 && args[1] == toString16("dump")) {
         return printWatchedTags(outFd);
-    } else if (args.size() >= 2 && args[1] == String16("live")) {
+    } else if (args.size() >= 2 && args[1] == toString16("live")) {
         return printWatchedTagsUntilInterrupt(args, inFd, outFd);
-    } else if (args.size() == 2 && args[1] == String16("clear")) {
+    } else if (args.size() == 2 && args[1] == toString16("clear")) {
         return clearCachedMonitoredTagDumps(outFd);
     }
     dprintf(outFd, "Camera service watch commands:\n"
@@ -5707,7 +5775,7 @@
     Mutex::Autolock lock(mLogLock);
     size_t tagsIdx; // index of '-m'
     String16 tags("");
-    for (tagsIdx = 2; tagsIdx < args.size() && args[tagsIdx] != String16("-m"); tagsIdx++);
+    for (tagsIdx = 2; tagsIdx < args.size() && args[tagsIdx] != toString16("-m"); tagsIdx++);
     if (tagsIdx < args.size() - 1) {
         tags = args[tagsIdx + 1];
     } else {
@@ -5716,16 +5784,17 @@
     }
 
     size_t clientsIdx; // index of '-c'
-    String16 clients = kWatchAllClientsFlag; // watch all clients if no clients are provided
-    for (clientsIdx = 2; clientsIdx < args.size() && args[clientsIdx] != String16("-c");
+    // watch all clients if no clients are provided
+    String16 clients = toString16(kWatchAllClientsFlag);
+    for (clientsIdx = 2; clientsIdx < args.size() && args[clientsIdx] != toString16("-c");
          clientsIdx++);
     if (clientsIdx < args.size() - 1) {
         clients = args[clientsIdx + 1];
     }
-    parseClientsToWatchLocked(String8(clients));
+    parseClientsToWatchLocked(toStdString(clients));
 
     // track tags to initialize future clients with the monitoring information
-    mMonitorTags = String8(tags);
+    mMonitorTags = toStdString(tags);
 
     bool serviceLock = tryLock(mServiceLock);
     int numWatchedClients = 0;
@@ -5749,7 +5818,7 @@
 status_t CameraService::stopWatchingTags(int outFd) {
     // clear mMonitorTags to prevent new clients from monitoring tags at initialization
     Mutex::Autolock lock(mLogLock);
-    mMonitorTags = String8::empty();
+    mMonitorTags = "";
 
     mWatchedClientPackages.clear();
     mWatchedClientsDumpCache.clear();
@@ -5777,7 +5846,7 @@
 
 status_t CameraService::printWatchedTags(int outFd) {
     Mutex::Autolock logLock(mLogLock);
-    std::set<String16> connectedMonitoredClients;
+    std::set<std::string> connectedMonitoredClients;
 
     bool printedSomething = false; // tracks if any monitoring information was printed
                                    // (from either cached or active clients)
@@ -5800,17 +5869,14 @@
         }
 
         // Print tag dumps for active client
-        const String8 &cameraId = clientDescriptor->getKey();
-        String8 packageName8 = String8(client->getPackageName());
-        const char *printablePackageName = packageName8.lockBuffer(packageName8.size());
-        dprintf(outFd, "Client: %s (active)\n", printablePackageName);
+        const std::string &cameraId = clientDescriptor->getKey();
+        dprintf(outFd, "Client: %s (active)\n", client->getPackageName().c_str());
         while(printIdx > 0) {
             printIdx--;
-            dprintf(outFd, "%s:%s  %s", cameraId.string(), printablePackageName,
+            dprintf(outFd, "%s:%s  %s", cameraId.c_str(), client->getPackageName().c_str(),
                     dumpVector[printIdx].c_str());
         }
         dprintf(outFd, "\n");
-        packageName8.unlockBuffer();
         printedSomething = true;
 
         connectedMonitoredClients.emplace(client->getPackageName());
@@ -5819,12 +5885,12 @@
 
     // Print entries in mWatchedClientsDumpCache for clients that are not connected
     for (const auto &kv: mWatchedClientsDumpCache) {
-        const String16 &package = kv.first;
+        const std::string &package = kv.first;
         if (connectedMonitoredClients.find(package) != connectedMonitoredClients.end()) {
             continue;
         }
 
-        dprintf(outFd, "Client: %s (cached)\n", String8(package).string());
+        dprintf(outFd, "Client: %s (cached)\n", package.c_str());
         dprintf(outFd, "%s\n", kv.second.c_str());
         printedSomething = true;
     }
@@ -5838,8 +5904,8 @@
 
 // Print all events in vector `events' that came after lastPrintedEvent
 void printNewWatchedEvents(int outFd,
-                           const char *cameraId,
-                           const String16 &packageName,
+                           const std::string &cameraId,
+                           const std::string &packageName,
                            const std::vector<std::string> &events,
                            const std::string &lastPrintedEvent) {
     if (events.empty()) { return; }
@@ -5853,17 +5919,13 @@
 
     if (lastPrintedIdx == 0) { return; } // early exit if no new event in `events`
 
-    String8 packageName8(packageName);
-    const char *printablePackageName = packageName8.lockBuffer(packageName8.size());
-
     // print events in chronological order (latest event last)
     size_t idxToPrint = lastPrintedIdx;
     do {
         idxToPrint--;
-        dprintf(outFd, "%s:%s  %s", cameraId, printablePackageName, events[idxToPrint].c_str());
+        dprintf(outFd, "%s:%s  %s", cameraId.c_str(), packageName.c_str(),
+                events[idxToPrint].c_str());
     } while (idxToPrint != 0);
-
-    packageName8.unlockBuffer();
 }
 
 // Returns true if adb shell cmd watch should be interrupted based on data in inFd. The watch
@@ -5940,12 +6002,12 @@
     long refreshTimeoutMs = 1000L; // refresh every 1s by default
     if (args.size() > 2) {
         size_t intervalIdx; // index of '-n'
-        for (intervalIdx = 2; intervalIdx < args.size() && String16("-n") != args[intervalIdx];
+        for (intervalIdx = 2; intervalIdx < args.size() && toString16("-n") != args[intervalIdx];
              intervalIdx++);
 
         size_t intervalValIdx = intervalIdx + 1;
         if (intervalValIdx < args.size()) {
-            refreshTimeoutMs = strtol(String8(args[intervalValIdx].string()), nullptr, 10);
+            refreshTimeoutMs = strtol(toStdString(args[intervalValIdx]).c_str(), nullptr, 10);
             if (errno) { return BAD_VALUE; }
         }
     }
@@ -5954,7 +6016,7 @@
     refreshTimeoutMs = refreshTimeoutMs < 10 ? 10 : refreshTimeoutMs;
 
     dprintf(outFd, "Press return to exit...\n\n");
-    std::map<String16, std::string> packageNameToLastEvent;
+    std::map<std::string, std::string> packageNameToLastEvent;
 
     while (true) {
         bool serviceLock = tryLock(mServiceLock);
@@ -5969,7 +6031,7 @@
             if (client.get() == nullptr) { continue; }
             if (!isClientWatchedLocked(client.get())) { continue; }
 
-            const String16 &packageName = client->getPackageName();
+            const std::string &packageName = client->getPackageName();
             // This also initializes the map entries with an empty string
             const std::string& lastPrintedEvent = packageNameToLastEvent[packageName];
 
@@ -5977,15 +6039,12 @@
             client->dumpWatchedEventsToVector(latestEvents);
 
             if (!latestEvents.empty()) {
-                String8 cameraId = clientDescriptor->getKey();
-                const char *printableCameraId = cameraId.lockBuffer(cameraId.size());
                 printNewWatchedEvents(outFd,
-                                      printableCameraId,
+                                      clientDescriptor->getKey(),
                                       packageName,
                                       latestEvents,
                                       lastPrintedEvent);
                 packageNameToLastEvent[packageName] = latestEvents[0];
-                cameraId.unlockBuffer();
             }
         }
         if (shouldInterruptWatchCommand(inFd, outFd, refreshTimeoutMs)) {
@@ -5995,17 +6054,14 @@
     return OK;
 }
 
-void CameraService::parseClientsToWatchLocked(String8 clients) {
+void CameraService::parseClientsToWatchLocked(const std::string &clients) {
     mWatchedClientPackages.clear();
 
-    const char *allSentinel = String8(kWatchAllClientsFlag).string();
+    std::istringstream iss(clients);
+    std::string nextClient;
 
-    char *tokenized = clients.lockBuffer(clients.size());
-    char *savePtr;
-    char *nextClient = strtok_r(tokenized, ",", &savePtr);
-
-    while (nextClient != nullptr) {
-        if (strcmp(nextClient, allSentinel) == 0) {
+    while (std::getline(iss, nextClient, ',')) {
+        if (nextClient == kWatchAllClientsFlag) {
             // Don't need to track any other package if 'all' is present
             mWatchedClientPackages.clear();
             mWatchedClientPackages.emplace(kWatchAllClientsFlag);
@@ -6014,9 +6070,7 @@
 
         // track package names
         mWatchedClientPackages.emplace(nextClient);
-        nextClient = strtok_r(nullptr, ",", &savePtr);
     }
-    clients.unlockBuffer();
 }
 
 status_t CameraService::printHelp(int out) {
@@ -6045,8 +6099,10 @@
         "  clear-stream-use-case-override clear the stream use case override\n"
         "  set-zoom-override <-1/0/1> enable or disable zoom override\n"
         "      Valid values -1: do not override, 0: override to OFF, 1: override to ZOOM\n"
-        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
+        "  set-watchdog <VALUE> enables or disables the camera service watchdog\n"
+        "      Valid values 0=disable, 1=enable\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
+        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  help print this message\n");
 }
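
A few hunks above, parseClientsToWatchLocked drops the strtok_r/lockBuffer tokenizer in favor of std::istringstream and std::getline, which operate directly on the const std::string argument. The same pattern in isolation, as a rough sketch (the free function and its names are illustrative, not part of the patch):

#include <set>
#include <sstream>
#include <string>

// Split a comma-separated client list, collapsing to the single "all" sentinel
// if it appears anywhere -- mirroring the rewritten parseClientsToWatchLocked.
static std::set<std::string> splitClients(const std::string& clients,
                                          const std::string& allSentinel) {
    std::set<std::string> packages;
    std::istringstream iss(clients);
    std::string next;
    while (std::getline(iss, next, ',')) {
        if (next == allSentinel) {
            return {allSentinel};  // nothing else needs to be tracked
        }
        packages.emplace(next);
    }
    return packages;
}

// splitClients("com.example.camera,com.example.video", "all")
//     -> {"com.example.camera", "com.example.video"}
// splitClients("com.example.camera,all", "all") -> {"all"}

Because std::getline reads from a copy held by the istringstream, the input no longer has to be passed by value or mutated in place, which is why the parameter can become const std::string&.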
 
@@ -6081,13 +6137,13 @@
     return mode;
 }
 
-status_t CameraService::checkIfInjectionCameraIsPresent(const String8& externalCamId,
+status_t CameraService::checkIfInjectionCameraIsPresent(const std::string& externalCamId,
         sp<BasicClient> clientSp) {
     std::unique_ptr<AutoConditionLock> lock =
             AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
     status_t res = NO_ERROR;
     if ((res = checkIfDeviceIsUsable(externalCamId)) != NO_ERROR) {
-        ALOGW("Device %s is not usable!", externalCamId.string());
+        ALOGW("Device %s is not usable!", externalCamId.c_str());
         mInjectionStatusListener->notifyInjectionError(
                 externalCamId, UNKNOWN_TRANSACTION);
         clientSp->notifyError(
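
Several hunks in this file also delete the String8::lockBuffer()/unlockBuffer() pairs that previously bracketed dprintf calls. std::string::c_str() already returns a stable, NUL-terminated pointer for the lifetime of the string, so the watched-event printing collapses to direct calls. A minimal sketch of the simplified pattern used by printNewWatchedEvents (the free-standing function and its names are illustrative; it assumes `events` is ordered latest-first, as the dump vectors above are):

#include <stdio.h>
#include <string>
#include <vector>

// Print events oldest-first, each prefixed with "<cameraId>:<packageName>",
// with no lockBuffer()/unlockBuffer() bracketing required.
static void printEvents(int outFd, const std::string& cameraId,
                        const std::string& packageName,
                        const std::vector<std::string>& events) {
    for (auto it = events.rbegin(); it != events.rend(); ++it) {
        dprintf(outFd, "%s:%s  %s", cameraId.c_str(), packageName.c_str(),
                it->c_str());
    }
}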
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 65b11e7..48954ca 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -121,19 +122,19 @@
     /////////////////////////////////////////////////////////////////////
     // HAL Callbacks - implements CameraProviderManager::StatusListener
 
-    virtual void        onDeviceStatusChanged(const String8 &cameraId,
+    virtual void        onDeviceStatusChanged(const std::string &cameraId,
             CameraDeviceStatus newHalStatus) override;
-    virtual void        onDeviceStatusChanged(const String8 &cameraId,
-            const String8 &physicalCameraId,
+    virtual void        onDeviceStatusChanged(const std::string &cameraId,
+            const std::string &physicalCameraId,
             CameraDeviceStatus newHalStatus) override;
     // This method may hold CameraProviderManager::mInterfaceMutex as a part
     // of calling getSystemCameraKind() internally. Care should be taken not to
     // directly / indirectly call this from callers who also hold
     // mInterfaceMutex.
-    virtual void        onTorchStatusChanged(const String8& cameraId,
+    virtual void        onTorchStatusChanged(const std::string& cameraId,
             TorchModeStatus newStatus) override;
     // Does not hold CameraProviderManager::mInterfaceMutex.
-    virtual void        onTorchStatusChanged(const String8& cameraId,
+    virtual void        onTorchStatusChanged(const std::string& cameraId,
             TorchModeStatus newStatus,
             SystemCameraKind kind) override;
     virtual void        onNewProviderRegistered() override;
@@ -147,7 +148,7 @@
 
     virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
             hardware::CameraInfo* cameraInfo) override;
-    virtual binder::Status     getCameraCharacteristics(const String16& cameraId,
+    virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
             int targetSdkVersion, bool overrideToPortrait, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
@@ -157,15 +158,16 @@
             hardware::camera2::params::VendorTagDescriptorCache* cache);
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, const String16& clientPackageName,
+            int32_t cameraId, const std::string& clientPackageName,
             int32_t clientUid, int clientPid, int targetSdkVersion,
             bool overrideToPortrait, bool forceSlowJpegMode,
             /*out*/
             sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
-            const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb, const String16& cameraId,
-            const String16& clientPackageName, const std::optional<String16>& clientFeatureId,
+            const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
+            const std::string& cameraId,
+            const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
             int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
@@ -187,15 +189,15 @@
     virtual binder::Status    getLegacyParameters(
             int32_t cameraId,
             /*out*/
-            String16* parameters);
+            std::string* parameters);
 
-    virtual binder::Status    setTorchMode(const String16& cameraId, bool enabled,
+    virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
             const sp<IBinder>& clientBinder);
 
-    virtual binder::Status    turnOnTorchWithStrengthLevel(const String16& cameraId,
+    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
             int32_t torchStrength, const sp<IBinder>& clientBinder);
 
-    virtual binder::Status    getTorchStrengthLevel(const String16& cameraId,
+    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
             int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
@@ -207,24 +209,24 @@
 
     // OK = supports api of that version, -EOPNOTSUPP = does not support
     virtual binder::Status    supportsCameraApi(
-            const String16& cameraId, int32_t apiVersion,
+            const std::string& cameraId, int32_t apiVersion,
             /*out*/
             bool *isSupported);
 
     virtual binder::Status    isHiddenPhysicalCamera(
-            const String16& cameraId,
+            const std::string& cameraId,
             /*out*/
             bool *isSupported);
 
     virtual binder::Status injectCamera(
-            const String16& packageName, const String16& internalCamId,
-            const String16& externalCamId,
+            const std::string& packageName, const std::string& internalCamId,
+            const std::string& externalCamId,
             const sp<hardware::camera2::ICameraInjectionCallback>& callback,
             /*out*/
             sp<hardware::camera2::ICameraInjectionSession>* cameraInjectionSession);
 
     virtual binder::Status reportExtensionSessionStats(
-            const hardware::CameraExtensionSessionStats& stats, String16* sessionKey /*out*/);
+            const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
     virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
         cameraIdRemapping);
@@ -250,7 +252,7 @@
     void cacheDump();
 
     // Register an offline client for a given active camera id
-    status_t addOfflineClient(String8 cameraId, sp<BasicClient> offlineClient);
+    status_t addOfflineClient(const std::string &cameraId, sp<BasicClient> offlineClient);
 
     /////////////////////////////////////////////////////////////////////
     // Client functionality
@@ -269,7 +271,7 @@
 
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
-    std::pair<int, IPCTransport>    getDeviceVersion(const String8& cameraId,
+    std::pair<int, IPCTransport>    getDeviceVersion(const std::string& cameraId,
             bool overrideToPortrait, int* portraitRotation,
             int* facing = nullptr, int* orientation = nullptr);
 
@@ -297,7 +299,7 @@
     friend class CameraService;
     public:
         virtual status_t       initialize(sp<CameraProviderManager> manager,
-                const String8& monitorTags) = 0;
+                const std::string& monitorTags) = 0;
         virtual binder::Status disconnect();
 
         // because we can't virtually inherit IInterface, which breaks
@@ -318,12 +320,12 @@
         // Internal dump method to be called by CameraService
         virtual status_t dumpClient(int fd, const Vector<String16>& args) = 0;
 
-        virtual status_t startWatchingTags(const String8 &tags, int outFd);
+        virtual status_t startWatchingTags(const std::string &tags, int outFd);
         virtual status_t stopWatchingTags(int outFd);
         virtual status_t dumpWatchedEventsToVector(std::vector<std::string> &out);
 
         // Return the package name for this client
-        virtual String16 getPackageName() const;
+        virtual std::string getPackageName() const;
 
         // Return the camera facing for this client
         virtual int getCameraFacing() const;
@@ -391,7 +393,7 @@
 
         // The injection camera session to replace the internal camera
         // session.
-        virtual status_t injectCamera(const String8& injectedCamId,
+        virtual status_t injectCamera(const std::string& injectedCamId,
                 sp<CameraProviderManager> manager) = 0;
 
         // Stop the injection camera and restore to internal camera session.
@@ -400,10 +402,10 @@
     protected:
         BasicClient(const sp<CameraService>& cameraService,
                 const sp<IBinder>& remoteCallback,
-                const String16& clientPackageName,
+                const std::string& clientPackageName,
                 bool nativeClient,
-                const std::optional<String16>& clientFeatureId,
-                const String8& cameraIdStr,
+                const std::optional<std::string>& clientFeatureId,
+                const std::string& cameraIdStr,
                 int cameraFacing,
                 int sensorOrientation,
                 int clientPid,
@@ -421,12 +423,12 @@
 
         // these are initialized in the constructor.
         static sp<CameraService>        sCameraService;
-        const String8                   mCameraIdStr;
+        const std::string               mCameraIdStr;
         const int                       mCameraFacing;
         const int                       mOrientation;
-        String16                        mClientPackageName;
+        std::string                     mClientPackageName;
         bool                            mSystemNativeClient;
-        std::optional<String16>         mClientFeatureId;
+        std::optional<std::string>      mClientFeatureId;
         pid_t                           mClientPid;
         const uid_t                     mClientUid;
         const pid_t                     mServicePid;
@@ -513,10 +515,10 @@
         // Interface used by CameraService
         Client(const sp<CameraService>& cameraService,
                 const sp<hardware::ICameraClient>& cameraClient,
-                const String16& clientPackageName,
+                const std::string& clientPackageName,
                 bool systemNativeClient,
-                const std::optional<String16>& clientFeatureId,
-                const String8& cameraIdStr,
+                const std::optional<std::string>& clientFeatureId,
+                const std::string& cameraIdStr,
                 int api1CameraId,
                 int cameraFacing,
                 int sensorOrientation,
@@ -560,13 +562,13 @@
      */
     class ClientEventListener {
     public:
-        void onClientAdded(const resource_policy::ClientDescriptor<String8,
+        void onClientAdded(const resource_policy::ClientDescriptor<std::string,
                 sp<CameraService::BasicClient>>& descriptor);
-        void onClientRemoved(const resource_policy::ClientDescriptor<String8,
+        void onClientRemoved(const resource_policy::ClientDescriptor<std::string,
                 sp<CameraService::BasicClient>>& descriptor);
     }; // class ClientEventListener
 
-    typedef std::shared_ptr<resource_policy::ClientDescriptor<String8,
+    typedef std::shared_ptr<resource_policy::ClientDescriptor<std::string,
             sp<CameraService::BasicClient>>> DescriptorPtr;
 
     /**
@@ -577,7 +579,7 @@
      * This class manages the eviction behavior for the camera clients.  See the parent class
      * implementation in utils/ClientManager for the specifics of this behavior.
      */
-    class CameraClientManager : public resource_policy::ClientManager<String8,
+    class CameraClientManager : public resource_policy::ClientManager<std::string,
             sp<CameraService::BasicClient>, ClientEventListener> {
     public:
         CameraClientManager();
@@ -587,18 +589,19 @@
          * Return a strong pointer to the active BasicClient for this camera ID, or an empty
          * pointer if none exists.
          */
-        sp<CameraService::BasicClient> getCameraClient(const String8& id) const;
+        sp<CameraService::BasicClient> getCameraClient(const std::string& id) const;
 
         /**
          * Return a string describing the current state.
          */
-        String8 toString() const;
+        std::string toString() const;
 
         /**
          * Make a ClientDescriptor object wrapping the given BasicClient strong pointer.
          */
-        static DescriptorPtr makeClientDescriptor(const String8& key, const sp<BasicClient>& value,
-                int32_t cost, const std::set<String8>& conflictingKeys, int32_t score,
+        static DescriptorPtr makeClientDescriptor(const std::string& key,
+                const sp<BasicClient>& value, int32_t cost,
+                const std::set<std::string>& conflictingKeys, int32_t score,
                 int32_t ownerId, int32_t state, int oomScoreOffset, bool systemNativeClient);
 
         /**
@@ -615,6 +618,13 @@
     int32_t updateAudioRestrictionLocked();
 
 private:
+    /**
+     * Returns true if the device is an automotive device and cameraId refers to a
+     * system-only camera whose AUTOMOTIVE_LOCATION characteristic is one of
+     * AUTOMOTIVE_LOCATION_EXTERIOR_LEFT, AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT,
+     * AUTOMOTIVE_LOCATION_EXTERIOR_FRONT or AUTOMOTIVE_LOCATION_EXTERIOR_REAR.
+     */
+    bool isAutomotiveExteriorSystemCamera(const std::string& cameraId) const;
 
     // TODO: b/263304156 update this to make use of a death callback for more
     // robust/fault tolerant logging
@@ -631,6 +641,22 @@
     }
 
     /**
+     * Pre-grants the permission if the attribution source uid belongs to an automotive
+     * privileged client; otherwise it defers to the system service permission checker for
+     * the appropriate permission. If this function is called to access a specific camera,
+     * then cameraId must not be empty. The cameraId is used only for automotive privileged
+     * clients, so that the permission is pre-granted solely for system camera devices
+     * located outside the vehicle body frame; a camera located inside the vehicle cabin
+     * still requires user permission.
+     */
+    bool checkPermission(const std::string& cameraId, const std::string& permission,
+            const content::AttributionSourceState& attributionSource, const std::string& message,
+            int32_t attributedOpCode) const;
+
+    bool hasPermissionsForSystemCamera(const std::string& cameraId, int callingPid, int callingUid)
+            const;
+
+    /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
      */
@@ -658,7 +684,7 @@
          * Make a new CameraState and set the ID, cost, and conflicting devices using the values
          * returned in the HAL's camera_info struct for each device.
          */
-        CameraState(const String8& id, int cost, const std::set<String8>& conflicting,
+        CameraState(const std::string& id, int cost, const std::set<std::string>& conflicting,
                 SystemCameraKind deviceKind, const std::vector<std::string>& physicalCameras);
         virtual ~CameraState();
 
@@ -672,7 +698,7 @@
         /**
          * This function updates the status for this camera device, unless the given status
          * is in the given list of rejected status states, and executes the function passed in
-         * with a signature onStatusUpdateLocked(const String8&, int32_t)
+         * with a signature onStatusUpdateLocked(const std::string&, int32_t)
          * if the status has changed.
          *
          * This method is idempotent, and will not result in the function passed to
@@ -681,7 +707,7 @@
          */
         template<class Func>
         void updateStatus(StatusInternal status,
-                const String8& cameraId,
+                const std::string& cameraId,
                 std::initializer_list<StatusInternal> rejectSourceStates,
                 Func onStatusUpdatedLocked);
 
@@ -704,12 +730,7 @@
         /**
          * Return a set of the IDs of conflicting devices advertised by the HAL for this device.
          */
-        std::set<String8> getConflicting() const;
-
-        /**
-         * Return the ID of this camera device.
-         */
-        String8 getId() const;
+        std::set<std::string> getConflicting() const;
 
         /**
          * Return the kind (SystemCameraKind) of this camera device.
@@ -725,28 +746,28 @@
         /**
          * Add/Remove the unavailable physical camera ID.
          */
-        bool addUnavailablePhysicalId(const String8& physicalId);
-        bool removeUnavailablePhysicalId(const String8& physicalId);
+        bool addUnavailablePhysicalId(const std::string& physicalId);
+        bool removeUnavailablePhysicalId(const std::string& physicalId);
 
         /**
          * Set and get client package name.
          */
-        void setClientPackage(const String8& clientPackage);
-        String8 getClientPackage() const;
+        void setClientPackage(const std::string& clientPackage);
+        std::string getClientPackage() const;
 
         /**
          * Return the unavailable physical ids for this device.
          *
          * This method acquires mStatusLock.
          */
-        std::vector<String8> getUnavailablePhysicalIds() const;
+        std::vector<std::string> getUnavailablePhysicalIds() const;
     private:
-        const String8 mId;
+        const std::string mId;
         StatusInternal mStatus; // protected by mStatusLock
         const int mCost;
-        std::set<String8> mConflicting;
-        std::set<String8> mUnavailablePhysicalIds;
-        String8 mClientPackage;
+        std::set<std::string> mConflicting;
+        std::set<std::string> mUnavailablePhysicalIds;
+        std::string mClientPackage;
         mutable Mutex mStatusLock;
         CameraParameters mShimParams;
         const SystemCameraKind mSystemCameraKind;
@@ -766,7 +787,7 @@
         void registerSelf();
         void unregisterSelf();
 
-        bool isUidActive(uid_t uid, String16 callingPackage);
+        bool isUidActive(uid_t uid, const std::string &callingPackage);
         int32_t getProcState(uid_t uid);
 
         // IUidObserver
@@ -777,8 +798,8 @@
                 int32_t capability) override;
         void onUidProcAdjChanged(uid_t uid, int adj) override;
 
-        void addOverrideUid(uid_t uid, String16 callingPackage, bool active);
-        void removeOverrideUid(uid_t uid, String16 callingPackage);
+        void addOverrideUid(uid_t uid, const std::string &callingPackage, bool active);
+        void removeOverrideUid(uid_t uid, const std::string &callingPackage);
 
         void registerMonitorUid(uid_t uid, bool openCamera);
         void unregisterMonitorUid(uid_t uid, bool closeCamera);
@@ -789,9 +810,10 @@
         // IBinder::DeathRecipient implementation
         virtual void binderDied(const wp<IBinder> &who);
     private:
-        bool isUidActiveLocked(uid_t uid, String16 callingPackage);
+        bool isUidActiveLocked(uid_t uid, const std::string &callingPackage);
         int32_t getProcStateLocked(uid_t uid);
-        void updateOverrideUid(uid_t uid, String16 callingPackage, bool active, bool insert);
+        void updateOverrideUid(uid_t uid, const std::string &callingPackage, bool active,
+                bool insert);
         void registerWithActivityManager();
 
         struct MonitoredUid {
@@ -861,8 +883,8 @@
 
     // Add/remove a new camera to camera and torch state lists or remove an unplugged one
     // Caller must not hold mServiceLock
-    void addStates(const String8 id);
-    void removeStates(const String8 id);
+    void addStates(const std::string& id);
+    void removeStates(const std::string& id);
 
     // Check if we can connect, before we acquire the service lock.
     // The returned originalClientPid is the PID of the original process that wants to connect to
@@ -871,36 +893,37 @@
     // originalClientPid and clientPid are usually the same except when the application uses
     // mediaserver to connect to camera (using MediaRecorder to connect to camera). In that case,
     // clientPid is the PID of mediaserver and originalClientPid is the PID of the application.
-    binder::Status validateConnectLocked(const String8& cameraId, const String8& clientName8,
+    binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
             /*inout*/int& clientUid, /*inout*/int& clientPid, /*out*/int& originalClientPid) const;
-    binder::Status validateClientPermissionsLocked(const String8& cameraId, const String8& clientName8,
-            /*inout*/int& clientUid, /*inout*/int& clientPid, /*out*/int& originalClientPid) const;
+    binder::Status validateClientPermissionsLocked(const std::string& cameraId,
+            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
+            /*out*/int& originalClientPid) const;
 
     // Handle active client evictions, and update service state.
     // Only call with mServiceLock held.
-    status_t handleEvictionsLocked(const String8& cameraId, int clientPid,
-        apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName,
-        int scoreOffset, bool systemNativeClient,
+    status_t handleEvictionsLocked(const std::string& cameraId, int clientPid,
+        apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback,
+        const std::string& packageName, int scoreOffset, bool systemNativeClient,
         /*out*/
         sp<BasicClient>* client,
-        std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial);
+        std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>>* partial);
 
     // Should an operation attempt on a cameraId be rejected? (This can happen
     // under various conditions, for example if a camera device is advertised as
     // system only or as a hidden secure camera, among other possibilities.)
-    bool shouldRejectSystemCameraConnection(const String8 & cameraId) const;
+    bool shouldRejectSystemCameraConnection(const std::string& cameraId) const;
 
     // Should a device status update be skipped for a particular camera device? (This can happen
     // under various conditions, for example if a camera device is advertised as
     // system only or as a hidden secure camera, among other possibilities.)
-    static bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
+    bool shouldSkipStatusUpdates(SystemCameraKind systemCameraKind, bool isVendorListener,
             int clientPid, int clientUid);
 
     // Gets the kind of camera device (i.e. public, hidden secure or system only).
     // getSystemCameraKind() needs mInterfaceMutex, which might lead to deadlocks
     // if held along with mStatusListenerLock (depending on lock ordering, b/141756275), so it is
     // recommended that we don't call this function with mStatusListenerLock held.
-    status_t getSystemCameraKind(const String8& cameraId, SystemCameraKind *kind) const;
+    status_t getSystemCameraKind(const std::string& cameraId, SystemCameraKind *kind) const;
 
     // Update the set of API1Compatible camera devices without including system
     // cameras and secure cameras. This is used for hiding system only cameras
@@ -916,15 +939,15 @@
     // as for legacy apps we will toggle the app op for all packages in the UID.
     // The caveat is that the operation may be attributed to the wrong package and
     // stats based on app ops may be slightly off.
-    String16 getPackageNameFromUid(int clientUid);
+    std::string getPackageNameFromUid(int clientUid);
 
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
-    binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
-            int api1CameraId, const String16& clientPackageNameMaybe, bool systemNativeClient,
-            const std::optional<String16>& clientFeatureId, int clientUid, int clientPid,
+    binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
+            int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
+            const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, const String8& originalCameraId,
+            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -934,19 +957,20 @@
     std::shared_ptr<WaitableMutexWrapper> mServiceLockWrapper;
 
     // Return NO_ERROR if the device with a given ID can be connected to
-    status_t checkIfDeviceIsUsable(const String8& cameraId) const;
+    status_t checkIfDeviceIsUsable(const std::string& cameraId) const;
 
     // Container for managing currently active application-layer clients
     CameraClientManager mActiveClientManager;
 
     // Adds client logs during open session to the file pointed by fd.
-    void dumpOpenSessionClientLogs(int fd, const Vector<String16>& args, const String8& cameraId);
+    void dumpOpenSessionClientLogs(int fd, const Vector<String16>& args,
+            const std::string& cameraId);
 
     // Adds client logs during closed session to the file pointed by fd.
-    void dumpClosedSessionClientLogs(int fd, const String8& cameraId);
+    void dumpClosedSessionClientLogs(int fd, const std::string& cameraId);
 
     // Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock
-    std::map<String8, std::shared_ptr<CameraState>> mCameraStates;
+    std::map<std::string, std::shared_ptr<CameraState>> mCameraStates;
 
     // Mutex guarding mCameraStates map
     mutable Mutex mCameraStatesLock;
@@ -957,7 +981,7 @@
      * This specifies that for packageName, for every binder operation targeting
      * cameraIdToReplace, use newCameraIdToUse instead.
      */
-    typedef std::map<String16, std::map<String8, String8>> TCameraIdRemapping;
+    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
     TCameraIdRemapping mCameraIdRemapping{};
     /** Mutex guarding mCameraIdRemapping. */
     Mutex mCameraIdRemappingLock;
@@ -973,14 +997,14 @@
      * This returns the Camera Id to use in case inputCameraId was remapped to a
      * different Id for the given packageName. Otherwise, it returns the inputCameraId.
      */
-    String8 resolveCameraId(const String8& inputCameraId, const String16& packageName);
+    std::string resolveCameraId(const std::string& inputCameraId, const std::string& packageName);
     /**
      * Resolve the (potentially remapped) camera Id to use.
      *
      * This returns the Camera Id to use in case inputCameraId was remapped to a
      * different Id for the packageName of the client. Otherwise, it returns the inputCameraId.
      */
-    String8 resolveCameraId(const String8& inputCameraId);
+    std::string resolveCameraId(const std::string& inputCameraId);
 
     /**
      * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
@@ -990,23 +1014,23 @@
     /**
      * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
      */
-    std::vector<String8> findOriginalIdsForRemappedCameraId(
-        const String8& inputCameraId, int clientUid);
+    std::vector<std::string> findOriginalIdsForRemappedCameraId(
+        const std::string& inputCameraId, int clientUid);
 
     // Circular buffer for storing event logging for dumps
-    RingBuffer<String8> mEventLog;
+    RingBuffer<std::string> mEventLog;
     Mutex mLogLock;
 
     // set of client package names to watch. if this set contains 'all', then all clients will
     // be watched. Access should be guarded by mLogLock
-    std::set<String16> mWatchedClientPackages;
+    std::set<std::string> mWatchedClientPackages;
     // cache of last monitored tags dump immediately before the client disconnects. If a client
     // re-connects, its entry is not updated until it disconnects again. Access should be guarded
     // by mLogLock
-    std::map<String16, std::string> mWatchedClientsDumpCache;
+    std::map<std::string, std::string> mWatchedClientsDumpCache;
 
     // The last monitored tags set by client
-    String8 mMonitorTags;
+    std::string mMonitorTags;
 
     // Currently allowed user IDs
     std::set<userid_t> mAllowedUsers;
@@ -1016,7 +1040,7 @@
      *
      * This acquires mCameraStatesLock.
      */
-    std::shared_ptr<CameraService::CameraState> getCameraState(const String8& cameraId) const;
+    std::shared_ptr<CameraService::CameraState> getCameraState(const std::string& cameraId) const;
 
     /**
      * Evict the client whose remote binder has died.  Returns true if this client was in the active
@@ -1047,7 +1071,7 @@
      * Returns the underlying camera Id string mapped to a camera id int
      * Empty string is returned when the cameraIdInt is invalid.
      */
-    String8 cameraIdIntToStr(int cameraIdInt);
+    std::string cameraIdIntToStr(int cameraIdInt);
 
     /**
      * Returns the underlying camera Id string mapped to a camera id int
@@ -1061,7 +1085,7 @@
      *
      * This method must be called with mServiceLock held.
      */
-    sp<CameraService::BasicClient> removeClientLocked(const String8& cameraId);
+    sp<CameraService::BasicClient> removeClientLocked(const std::string& cameraId);
 
     /**
      * Handle a notification that the current device user has changed.
@@ -1071,39 +1095,41 @@
     /**
      * Add an event log message.
      */
-    void logEvent(const char* event);
+    void logEvent(const std::string &event);
 
     /**
      * Add an event log message that a client has been disconnected.
      */
-    void logDisconnected(const char* cameraId, int clientPid, const char* clientPackage);
+    void logDisconnected(const std::string &cameraId, int clientPid,
+            const std::string &clientPackage);
 
     /**
      * Add an event log message that a client has been disconnected from offline device.
      */
-    void logDisconnectedOffline(const char* cameraId, int clientPid, const char* clientPackage);
+    void logDisconnectedOffline(const std::string &cameraId, int clientPid,
+            const std::string &clientPackage);
 
     /**
      * Add an event log message that an offline client has been connected.
      */
-    void logConnectedOffline(const char* cameraId, int clientPid,
-            const char* clientPackage);
+    void logConnectedOffline(const std::string &cameraId, int clientPid,
+            const std::string &clientPackage);
 
     /**
      * Add an event log message that a client has been connected.
      */
-    void logConnected(const char* cameraId, int clientPid, const char* clientPackage);
+    void logConnected(const std::string &cameraId, int clientPid, const std::string &clientPackage);
 
     /**
      * Add an event log message that a client's connect attempt has been rejected.
      */
-    void logRejected(const char* cameraId, int clientPid, const char* clientPackage,
-            const char* reason);
+    void logRejected(const std::string &cameraId, int clientPid, const std::string &clientPackage,
+            const std::string &reason);
 
     /**
      * Add an event log message when a client calls setTorchMode successfully.
      */
-    void logTorchEvent(const char* cameraId, const char *torchState, int clientPid);
+    void logTorchEvent(const std::string &cameraId, const std::string &torchState, int clientPid);
 
     /**
      * Add an event log message that the current device user has been switched.
@@ -1114,30 +1140,30 @@
     /**
      * Add an event log message that a device has been removed by the HAL
      */
-    void logDeviceRemoved(const char* cameraId, const char* reason);
+    void logDeviceRemoved(const std::string &cameraId, const std::string &reason);
 
     /**
      * Add an event log message that a device has been added by the HAL
      */
-    void logDeviceAdded(const char* cameraId, const char* reason);
+    void logDeviceAdded(const std::string &cameraId, const std::string &reason);
 
     /**
      * Add an event log message that a client has unexpectedly died.
      */
-    void logClientDied(int clientPid, const char* reason);
+    void logClientDied(int clientPid, const std::string &reason);
 
     /**
      * Add an event log message that a serious service-level error has occurred.
      * The errorCode should be one of the Android Errors
      */
-    void logServiceError(const char* msg, int errorCode);
+    void logServiceError(const std::string &msg, int errorCode);
 
     /**
      * Dump the event log to an FD
      */
     void dumpEventLog(int fd);
 
-    void cacheClientTagDumpIfNeeded(const char *cameraId, BasicClient *client);
+    void cacheClientTagDumpIfNeeded(const std::string &cameraId, BasicClient *client);
 
     /**
      * This method will acquire mServiceLock
@@ -1253,18 +1279,19 @@
      * This method acquires mStatusLock and mStatusListenerLock.
      */
     void updateStatus(StatusInternal status,
-            const String8& cameraId,
+            const std::string& cameraId,
             std::initializer_list<StatusInternal>
                 rejectedSourceStates);
     void updateStatus(StatusInternal status,
-            const String8& cameraId);
+            const std::string& cameraId);
 
     /**
      * Update the opened/closed status of the given camera id.
      *
      * This method acquires mStatusListenerLock.
      */
-    void updateOpenCloseStatus(const String8& cameraId, bool open, const String16& packageName);
+    void updateOpenCloseStatus(const std::string& cameraId, bool open,
+            const std::string& packageName);
 
     // flashlight control
     sp<CameraFlashlight> mFlashlight;
@@ -1275,38 +1302,38 @@
     // guard mTorchUidMap
     Mutex                mTorchUidMapMutex;
     // camera id -> torch status
-    KeyedVector<String8, TorchModeStatus>
+    KeyedVector<std::string, TorchModeStatus>
             mTorchStatusMap;
     // camera id -> torch client binder
     // only store the last client that turns on each camera's torch mode
-    KeyedVector<String8, sp<IBinder>> mTorchClientMap;
+    KeyedVector<std::string, sp<IBinder>> mTorchClientMap;
     // camera id -> [incoming uid, current uid] pair
-    std::map<String8, std::pair<int, int>> mTorchUidMap;
+    std::map<std::string, std::pair<int, int>> mTorchUidMap;
 
     // check and handle if torch client's process has died
     void handleTorchClientBinderDied(const wp<IBinder> &who);
 
     // handle torch mode status change and invoke callbacks. mTorchStatusMutex
     // should be locked.
-    void onTorchStatusChangedLocked(const String8& cameraId,
+    void onTorchStatusChangedLocked(const std::string& cameraId,
             TorchModeStatus newStatus,
             SystemCameraKind systemCameraKind);
 
     // get a camera's torch status. mTorchStatusMutex should be locked.
-    status_t getTorchStatusLocked(const String8 &cameraId,
+    status_t getTorchStatusLocked(const std::string &cameraId,
              TorchModeStatus *status) const;
 
     // set a camera's torch status. mTorchStatusMutex should be locked.
-    status_t setTorchStatusLocked(const String8 &cameraId,
+    status_t setTorchStatusLocked(const std::string &cameraId,
             TorchModeStatus status);
 
     // notify physical camera status when the physical camera is public.
     // Expects mStatusListenerLock to be locked.
-    void notifyPhysicalCameraStatusLocked(int32_t status, const String16& physicalCameraId,
-            const std::list<String16>& logicalCameraIds, SystemCameraKind deviceKind);
+    void notifyPhysicalCameraStatusLocked(int32_t status, const std::string& physicalCameraId,
+            const std::list<std::string>& logicalCameraIds, SystemCameraKind deviceKind);
 
     // get list of logical cameras which are backed by physicalCameraId
-    std::list<String16> getLogicalCameras(const String8& physicalCameraId);
+    std::list<std::string> getLogicalCameras(const std::string& physicalCameraId);
 
 
     // IBinder::DeathRecipient implementation
@@ -1401,7 +1428,7 @@
     // Parses comma separated clients list and adds them to mWatchedClientPackages.
     // Does not acquire mLogLock before modifying mWatchedClientPackages. It is the caller's
     // responsibility to acquire mLogLock before calling this function.
-    void parseClientsToWatchLocked(String8 clients);
+    void parseClientsToWatchLocked(const std::string &clients);
 
     // Prints the shell command help
     status_t printHelp(int out);
@@ -1418,40 +1445,38 @@
     /**
      * Get the current system time as a formatted string.
      */
-    static String8 getFormattedCurrentTime();
+    static std::string getFormattedCurrentTime();
 
-    static binder::Status makeClient(
-            const sp<CameraService>& cameraService, const sp<IInterface>& cameraCb,
-            const String16& packageName, bool systemNativeClient,
-            const std::optional<String16>& featureId, const String8& cameraId, int api1CameraId,
-            int facing, int sensorOrientation, int clientPid, uid_t clientUid, int servicePid,
+    static binder::Status makeClient(const sp<CameraService>& cameraService,
+            const sp<IInterface>& cameraCb, const std::string& packageName,
+            bool systemNativeClient, const std::optional<std::string>& featureId,
+            const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
+            int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
             bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
-            const String8& originalCameraId,
+            const std::string& originalCameraId,
             /*out*/ sp<BasicClient>* client);
 
-    status_t checkCameraAccess(const String16& opPackageName);
-
-    static String8 toString(std::set<userid_t> intSet);
+    static std::string toString(std::set<userid_t> intSet);
     static int32_t mapToInterface(TorchModeStatus status);
     static StatusInternal mapToInternal(CameraDeviceStatus status);
     static int32_t mapToInterface(StatusInternal status);
 
 
-    void broadcastTorchModeStatus(const String8& cameraId,
+    void broadcastTorchModeStatus(const std::string& cameraId,
             TorchModeStatus status, SystemCameraKind systemCameraKind);
 
-    void broadcastTorchStrengthLevel(const String8& cameraId, int32_t newTorchStrengthLevel);
+    void broadcastTorchStrengthLevel(const std::string& cameraId, int32_t newTorchStrengthLevel);
 
-    void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
+    void disconnectClient(const std::string& id, sp<BasicClient> clientToDisconnect);
 
     // Regular online and offline devices must not be in conflict at camera service layer.
     // Use separate keys for offline devices.
-    static const String8 kOfflineDevice;
+    static const std::string kOfflineDevice;
 
     // Sentinel value to be stored in `mWatchedClientPackages` to indicate that all clients should
     // be watched.
-    static const String16 kWatchAllClientsFlag;
+    static const std::string kWatchAllClientsFlag;
 
     // TODO: right now each BasicClient holds one AppOpsManager instance.
     // We can refactor the code so all of clients share this instance
@@ -1492,7 +1517,7 @@
 
             void addListener(const sp<hardware::camera2::ICameraInjectionCallback>& callback);
             void removeListener();
-            void notifyInjectionError(String8 injectedCamId, status_t err);
+            void notifyInjectionError(const std::string &injectedCamId, status_t err);
 
             // IBinder::DeathRecipient implementation
             virtual void binderDied(const wp<IBinder>& who);
@@ -1521,15 +1546,15 @@
 
     // When injecting the camera, it will check whether the injecting camera status is unavailable.
     // If it is, the disconnect function will be called to prevent camera access on the device.
-    status_t checkIfInjectionCameraIsPresent(const String8& externalCamId,
+    status_t checkIfInjectionCameraIsPresent(const std::string& externalCamId,
             sp<BasicClient> clientSp);
 
     void clearInjectionParameters();
 
     // This is the existing camera id being replaced.
-    String8 mInjectionInternalCamId;
+    std::string mInjectionInternalCamId;
     // This is the external camera Id replacing the internalId.
-    String8 mInjectionExternalCamId;
+    std::string mInjectionExternalCamId;
     bool mInjectionInitPending = false;
     // Guard mInjectionInternalCamId and mInjectionInitPending.
     Mutex mInjectionParametersLock;
@@ -1537,7 +1562,7 @@
     // Track the folded/unfolded device state. 0 == UNFOLDED, 4 == FOLDED
     int64_t mDeviceState;
 
-    void updateTorchUidMapLocked(const String16& cameraId, int uid);
+    void updateTorchUidMapLocked(const std::string& cameraId, int uid);
 };
 
 } // namespace android
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index de6ac9e..9f25865 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -32,7 +32,6 @@
 #include <chrono>
 #include <thread>
 #include <time.h>
-#include <utils/String8.h>
 #include <utils/Thread.h>
 #include <utils/Log.h>
 #include <unordered_map>
@@ -58,13 +57,13 @@
 };
 
 public:
-    explicit CameraServiceWatchdog(const String8 &cameraId,
+    explicit CameraServiceWatchdog(const std::string &cameraId,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
                     mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
                     mCycleLengthMs(kCycleLengthMs), mEnabled(true),
                     mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
 
-    explicit CameraServiceWatchdog (const String8 &cameraId, size_t maxCycles,
+    explicit CameraServiceWatchdog (const std::string &cameraId, size_t maxCycles,
             uint32_t cycleLengthMs, bool enabled,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
                     mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
@@ -151,7 +150,7 @@
     Mutex           mWatchdogLock;      // Lock for condition variable
     Mutex           mEnabledLock;       // Lock for enabled status
     Condition       mWatchdogCondition; // Condition variable for stop/start
-    String8         mCameraId;          // Camera Id the watchdog belongs to
+    std::string     mCameraId;          // Camera Id the watchdog belongs to
     bool            mPause;             // True if tid map is empty
     uint32_t        mMaxCycles;         // Max cycles
     uint32_t        mCycleLengthMs;     // Length of time elapsed per cycle
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 4232a81..a62f6de 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -86,7 +86,7 @@
     if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
 
     ::android::CameraMetadata cameraMetadata;
-    UStatus ret = mCameraService->getCameraCharacteristics(String16(in_cameraId.c_str()),
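+    // The AIDL camera id is already a std::string and is forwarded to the native service unchanged.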
+    UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
                                                            /* overrideToPortrait= */ false,
                                                            &cameraMetadata);
@@ -140,8 +140,8 @@
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
     binder::Status serviceRet = mCameraService->connectDevice(
             callbacks,
-            String16(in_cameraId.c_str()),
-            String16(""),
+            in_cameraId,
+            std::string(),
             /* clientFeatureId= */{},
             hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
@@ -249,7 +249,7 @@
             [this](const hardware::CameraStatus& s) {
                 bool supportsHAL3 = false;
                 binder::Status sRet =
-                            mCameraService->supportsCameraApi(String16(s.cameraId),
+                            mCameraService->supportsCameraApi(s.cameraId,
                                     UICameraService::API_VERSION_2, &supportsHAL3);
                 return !sRet.isOk() || !supportsHAL3;
             }), cameraStatusAndIds->end());
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
index e183063..d7ab0d9 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.cpp
@@ -18,6 +18,7 @@
 #include <aidl/AidlUtils.h>
 #include <aidl/android/frameworks/cameraservice/common/Status.h>
 #include <aidl/android/frameworks/cameraservice/service/CameraStatusAndId.h>
+#include <camera/StringUtils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
 
@@ -27,34 +28,31 @@
 using SStatus = ::aidl::android::frameworks::cameraservice::common::Status;
 
 binder::Status AidlCameraServiceListener::onStatusChanged(
-        int32_t status, const ::android::String16& cameraId) {
+        int32_t status, const std::string& cameraId) {
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
-    std::string sCameraId = String8(cameraId).string();
-    auto ret = mBase->onStatusChanged(sStatus, sCameraId);
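+    // cameraId now arrives as std::string, so it is handed to the AIDL callback without conversion.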
+    auto ret = mBase->onStatusChanged(sStatus, cameraId);
     LOG_STATUS_ERROR_IF_NOT_OK(ret, "onStatusChanged")
     return binder::Status::ok();
 }
 
 binder::Status AidlCameraServiceListener::onPhysicalCameraStatusChanged(
-        int32_t status, const ::android::String16& cameraId,
-        const ::android::String16& physicalCameraId) {
+        int32_t status, const std::string& cameraId,
+        const std::string& physicalCameraId) {
     SCameraDeviceStatus sStatus = convertCameraStatusToAidl(status);
-    std::string sCameraId = String8(cameraId).string();
-    std::string sPhysicalCameraId = String8(physicalCameraId).string();
 
-    auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, sCameraId, sPhysicalCameraId);
+    auto ret = mBase->onPhysicalCameraStatusChanged(sStatus, cameraId, physicalCameraId);
     LOG_STATUS_ERROR_IF_NOT_OK(ret, "onPhysicalCameraStatusChanged")
     return binder::Status::ok();
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStatusChanged(
-    int32_t, const ::android::String16&) {
+    int32_t, const std::string&) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status AidlCameraServiceListener::onTorchStrengthLevelChanged(
-    const ::android::String16&, int32_t) {
+    const std::string&, int32_t) {
     // We don't implement onTorchStrengthLevelChanged
     return binder::Status::ok();
 }
diff --git a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
index 906dd8e..6483fe1 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/aidl/AidlCameraServiceListener.h
@@ -45,25 +45,25 @@
     ~AidlCameraServiceListener() = default;
 
     ::android::binder::Status onStatusChanged(int32_t status,
-            const ::android::String16& cameraId) override;
+            const std::string& cameraId) override;
     ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
-            const ::android::String16& cameraId,
-            const ::android::String16& physicalCameraId) override;
+            const std::string& cameraId,
+            const std::string& physicalCameraId) override;
 
     ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const ::android::String16& cameraId) override;
+            int32_t status, const std::string& cameraId) override;
     ::android::binder::Status onTorchStrengthLevelChanged(
-            const ::android::String16& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel) override;
     binder::Status onCameraAccessPrioritiesChanged() override {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    binder::Status onCameraOpened(const ::android::String16& /*cameraId*/,
-            const ::android::String16& /*clientPackageId*/) override {
+    binder::Status onCameraOpened(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
-    binder::Status onCameraClosed(const ::android::String16& /*cameraId*/) override {
+    binder::Status onCameraClosed(const std::string& /*cameraId*/) override {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 1b8e53b..7291c5f 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -22,6 +22,7 @@
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
+#include <camera/StringUtils.h>
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -80,9 +81,8 @@
         iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(nh)));
         native_handle_delete(nh);
     }
-    String16 physicalCameraId16(src.physicalCameraId.c_str());
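+    // src.physicalCameraId is passed through directly; the intermediate String16 copy is gone.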
     UOutputConfiguration outputConfiguration(
-        iGBPs, convertFromAidl(src.rotation), physicalCameraId16,
+        iGBPs, convertFromAidl(src.rotation), src.physicalCameraId,
         src.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
         (windowHandles.size() > 1));
     return outputConfiguration;
@@ -175,7 +175,7 @@
     dst.frameNumber = src.frameNumber;
     dst.partialResultCount = src.partialResultCount;
     dst.errorStreamId = src.errorStreamId;
-    dst.errorPhysicalCameraId = String8(src.errorPhysicalCameraId).string();
+    dst.errorPhysicalCameraId = src.errorPhysicalCameraId;
     return dst;
 }
 
@@ -217,7 +217,7 @@
 SPhysicalCaptureResultInfo convertToAidl(const UPhysicalCaptureResultInfo & src,
                                          std::shared_ptr<CaptureResultMetadataQueue> & fmq) {
     SPhysicalCaptureResultInfo dst;
-    dst.physicalCameraId = String8(src.mPhysicalCameraId).string();
+    dst.physicalCameraId = src.mPhysicalCameraId;
 
     const camera_metadata_t *rawMetadata = src.mPhysicalCameraMetadata.getAndLock();
     // Try using fmq at first.
@@ -242,12 +242,12 @@
     size_t i = 0;
     for (const auto &statusAndId : src) {
         auto &a = (*dst)[i++];
-        a.cameraId = statusAndId.cameraId.c_str();
+        a.cameraId = statusAndId.cameraId;
         a.deviceStatus = convertCameraStatusToAidl(statusAndId.status);
         size_t numUnvailPhysicalCameras = statusAndId.unavailablePhysicalIds.size();
         a.unavailPhysicalCameraIds.resize(numUnvailPhysicalCameras);
         for (size_t j = 0; j < numUnvailPhysicalCameras; j++) {
-            a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j].c_str();
+            a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j];
         }
     }
 }
@@ -302,4 +302,4 @@
     return OK;
 }
 
-} // namespace android::hardware::cameraservice::utils::conversion::aidl
\ No newline at end of file
+} // namespace android::hardware::cameraservice::utils::conversion::aidl
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 5b5892a..87a4420 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -18,11 +18,14 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <sstream>
+
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
@@ -53,9 +56,9 @@
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-        const String16& clientPackageName,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraDeviceId,
+        const std::string& clientPackageName,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
@@ -85,7 +88,8 @@
     l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
 }
 
-status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
+status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
+        const std::string& monitorTags) {
     return initializeImpl(manager, monitorTags);
 }
 
@@ -105,7 +109,7 @@
 }
 
 template<typename TProviderPtr>
-status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
+status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags)
 {
     ATRACE_CALL();
     ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
@@ -137,16 +141,11 @@
     CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
             &mRotateAndCropPreviewTransform);
 
-    String8 threadName;
-
     mStreamingProcessor = new StreamingProcessor(this);
-    threadName = String8::format("C2-%d-StreamProc",
-            mCameraId);
 
+    std::string threadName = std::string("C2-") + std::to_string(mCameraId);
     mFrameProcessor = new FrameProcessor(mDevice, this);
-    threadName = String8::format("C2-%d-FrameProc",
-            mCameraId);
-    res = mFrameProcessor->run(threadName.string());
+    res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start frame processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -154,9 +153,7 @@
     }
 
     mCaptureSequencer = new CaptureSequencer(this);
-    threadName = String8::format("C2-%d-CaptureSeq",
-            mCameraId);
-    res = mCaptureSequencer->run(threadName.string());
+    res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -164,9 +161,7 @@
     }
 
     mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
-    threadName = String8::format("C2-%d-JpegProc",
-            mCameraId);
-    res = mJpegProcessor->run(threadName.string());
+    res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -174,10 +169,7 @@
     }
 
     mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
-
-    threadName = String8::format("C2-%d-ZslProc",
-            mCameraId);
-    res = mZslProcessor->run(threadName.string());
+    res = mZslProcessor->run((threadName + "-ZslProc").c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -185,9 +177,7 @@
     }
 
     mCallbackProcessor = new CallbackProcessor(this);
-    threadName = String8::format("C2-%d-CallbkProc",
-            mCameraId);
-    res = mCallbackProcessor->run(threadName.string());
+    res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start callback processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -220,47 +210,47 @@
 }
 
 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
-    String8 result;
-    result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
+    std::ostringstream result;
+    result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
             (getRemoteCallback() != NULL ?
-                    (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
+                    (void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
             mClientPid);
-    result.append("  State: ");
-#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
+    result << "  State: ";
+#define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
 
     const Parameters& p = mParameters.unsafeAccess();
 
-    result.append(Parameters::getStateName(p.state));
+    result << Parameters::getStateName(p.state);
 
-    result.append("\n  Current parameters:\n");
-    result.appendFormat("    Preview size: %d x %d\n",
+    result << "\n  Current parameters:\n";
+    result << fmt::sprintf("    Preview size: %d x %d\n",
             p.previewWidth, p.previewHeight);
-    result.appendFormat("    Preview FPS range: %d - %d\n",
+    result << fmt::sprintf("    Preview FPS range: %d - %d\n",
             p.previewFpsRange[0], p.previewFpsRange[1]);
-    result.appendFormat("    Preview HAL pixel format: 0x%x\n",
+    result << fmt::sprintf("    Preview HAL pixel format: 0x%x\n",
             p.previewFormat);
-    result.appendFormat("    Preview transform: %x\n",
+    result << fmt::sprintf("    Preview transform: %x\n",
             p.previewTransform);
-    result.appendFormat("    Picture size: %d x %d\n",
+    result << fmt::sprintf("    Picture size: %d x %d\n",
             p.pictureWidth, p.pictureHeight);
-    result.appendFormat("    Jpeg thumbnail size: %d x %d\n",
+    result << fmt::sprintf("    Jpeg thumbnail size: %d x %d\n",
             p.jpegThumbSize[0], p.jpegThumbSize[1]);
-    result.appendFormat("    Jpeg quality: %d, thumbnail quality: %d\n",
+    result << fmt::sprintf("    Jpeg quality: %d, thumbnail quality: %d\n",
             p.jpegQuality, p.jpegThumbQuality);
-    result.appendFormat("    Jpeg rotation: %d\n", p.jpegRotation);
-    result.appendFormat("    GPS tags %s\n",
+    result << fmt::sprintf("    Jpeg rotation: %d\n", p.jpegRotation);
+    result << fmt::sprintf("    GPS tags %s\n",
             p.gpsEnabled ? "enabled" : "disabled");
     if (p.gpsEnabled) {
-        result.appendFormat("    GPS lat x long x alt: %f x %f x %f\n",
+        result << fmt::sprintf("    GPS lat x long x alt: %f x %f x %f\n",
                 p.gpsCoordinates[0], p.gpsCoordinates[1],
                 p.gpsCoordinates[2]);
-        result.appendFormat("    GPS timestamp: %" PRId64 "\n",
+        result << fmt::sprintf("    GPS timestamp: %" PRId64 "\n",
                 p.gpsTimestamp);
-        result.appendFormat("    GPS processing method: %s\n",
+        result << fmt::sprintf("    GPS processing method: %s\n",
                 p.gpsProcessingMethod.string());
     }
 
-    result.append("    White balance mode: ");
+    result << "    White balance mode: ";
     switch (p.wbMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
@@ -270,10 +260,10 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Effect mode: ");
+    result << "    Effect mode: ";
     switch (p.effectMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
@@ -284,22 +274,22 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Antibanding mode: ");
+    result << "    Antibanding mode: ";
     switch (p.antibandingMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Scene mode: ");
+    result << "    Scene mode: ";
     switch (p.sceneMode) {
         case ANDROID_CONTROL_SCENE_MODE_DISABLED:
-            result.append("AUTO\n"); break;
+            result << "AUTO\n"; break;
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
@@ -316,10 +306,10 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Flash mode: ");
+    result << "    Flash mode: ";
     switch (p.flashMode) {
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
@@ -327,10 +317,10 @@
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Focus mode: ");
+    result << "    Focus mode: ";
     switch (p.focusMode) {
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
@@ -340,10 +330,10 @@
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("   Focus state: ");
+    result << "   Focus state: ";
     switch (p.focusState) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
@@ -352,12 +342,12 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Focusing areas:\n");
+    result << "    Focusing areas:\n";
     for (size_t i = 0; i < p.focusingAreas.size(); i++) {
-        result.appendFormat("      [ (%d, %d, %d, %d), weight %d ]\n",
+        result << fmt::sprintf("      [ (%d, %d, %d, %d), weight %d ]\n",
                 p.focusingAreas[i].left,
                 p.focusingAreas[i].top,
                 p.focusingAreas[i].right,
@@ -365,16 +355,16 @@
                 p.focusingAreas[i].weight);
     }
 
-    result.appendFormat("    Exposure compensation index: %d\n",
+    result << fmt::sprintf("    Exposure compensation index: %d\n",
             p.exposureCompensation);
 
-    result.appendFormat("    AE lock %s, AWB lock %s\n",
+    result << fmt::sprintf("    AE lock %s, AWB lock %s\n",
             p.autoExposureLock ? "enabled" : "disabled",
             p.autoWhiteBalanceLock ? "enabled" : "disabled" );
 
-    result.appendFormat("    Metering areas:\n");
+    result << "    Metering areas:\n";
     for (size_t i = 0; i < p.meteringAreas.size(); i++) {
-        result.appendFormat("      [ (%d, %d, %d, %d), weight %d ]\n",
+        result << fmt::sprintf("      [ (%d, %d, %d, %d), weight %d ]\n",
                 p.meteringAreas[i].left,
                 p.meteringAreas[i].top,
                 p.meteringAreas[i].right,
@@ -382,54 +372,56 @@
                 p.meteringAreas[i].weight);
     }
 
-    result.appendFormat("    Zoom index: %d\n", p.zoom);
-    result.appendFormat("    Video size: %d x %d\n", p.videoWidth,
+    result << fmt::sprintf("    Zoom index: %d\n", p.zoom);
+    result << fmt::sprintf("    Video size: %d x %d\n", p.videoWidth,
             p.videoHeight);
 
-    result.appendFormat("    Recording hint is %s\n",
+    result << fmt::sprintf("    Recording hint is %s\n",
             p.recordingHint ? "set" : "not set");
 
-    result.appendFormat("    Video stabilization is %s\n",
+    result << fmt::sprintf("    Video stabilization is %s\n",
             p.videoStabilization ? "enabled" : "disabled");
 
-    result.appendFormat("    Selected still capture FPS range: %d - %d\n",
+    result << fmt::sprintf("    Selected still capture FPS range: %d - %d\n",
             p.fastInfo.bestStillCaptureFpsRange[0],
             p.fastInfo.bestStillCaptureFpsRange[1]);
 
-    result.appendFormat("    Use zero shutter lag: %s\n",
+    result << fmt::sprintf("    Use zero shutter lag: %s\n",
             p.useZeroShutterLag() ? "yes" : "no");
 
-    result.append("  Current streams:\n");
-    result.appendFormat("    Preview stream ID: %d\n",
+    result << "  Current streams:\n";
+    result << fmt::sprintf("    Preview stream ID: %d\n",
             getPreviewStreamId());
-    result.appendFormat("    Capture stream ID: %d\n",
+    result << fmt::sprintf("    Capture stream ID: %d\n",
             getCaptureStreamId());
-    result.appendFormat("    Recording stream ID: %d\n",
+    result << fmt::sprintf("    Recording stream ID: %d\n",
             getRecordingStreamId());
 
-    result.append("  Quirks for this camera:\n");
+    result << "  Quirks for this camera:\n";
     bool haveQuirk = false;
     if (p.quirks.triggerAfWithAuto) {
-        result.appendFormat("    triggerAfWithAuto\n");
+        result << "    triggerAfWithAuto\n";
         haveQuirk = true;
     }
     if (p.quirks.useZslFormat) {
-        result.appendFormat("    useZslFormat\n");
+        result << "    useZslFormat\n";
         haveQuirk = true;
     }
     if (p.quirks.meteringCropRegion) {
-        result.appendFormat("    meteringCropRegion\n");
+        result << "    meteringCropRegion\n";
         haveQuirk = true;
     }
     if (p.quirks.partialResults) {
-        result.appendFormat("    usePartialResult\n");
+        result << "    usePartialResult\n";
         haveQuirk = true;
     }
     if (!haveQuirk) {
-        result.appendFormat("    none\n");
+        result << "    none\n";
     }
 
-    write(fd, result.string(), result.size());
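+    // Flush the accumulated dump to the file descriptor in a single write.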
+    std::string resultStr = result.str();
+
+    write(fd, resultStr.c_str(), resultStr.size());
 
     mStreamingProcessor->dump(fd, args);
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index a7ea823..9ec1eb5 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,9 +103,9 @@
     Camera2Client(const sp<CameraService>& cameraService,
             const sp<hardware::ICameraClient>& cameraClient,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-            const String16& clientPackageName,
-            const std::optional<String16>& clientFeatureId,
-            const String8& cameraDeviceId,
+            const std::string& clientPackageName,
+            const std::optional<std::string>& clientFeatureId,
+            const std::string& cameraDeviceId,
             int api1CameraId,
             int cameraFacing,
             int sensorOrientation,
@@ -119,7 +119,7 @@
     virtual ~Camera2Client();
 
     virtual status_t initialize(sp<CameraProviderManager> manager,
-            const String8& monitorTags) override;
+            const std::string& monitorTags) override;
 
     virtual status_t dump(int fd, const Vector<String16>& args);
 
@@ -244,7 +244,7 @@
     status_t overrideVideoSnapshotSize(Parameters &params);
 
     template<typename TProviderPtr>
-    status_t initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
+    status_t initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags);
 
     bool isZslEnabledInStillTemplate();
     // The current rotate & crop mode passed by camera service
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index ee764ec..17db20b 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -158,7 +158,7 @@
         res = device->createStream(mCallbackWindow,
                 params.previewWidth, params.previewHeight, callbackFormat,
                 HAL_DATASPACE_V0_JFIF, CAMERA_STREAM_ROTATION_0, &mCallbackStreamId,
-                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+                std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 4c9b7ed..0b5e03f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -23,6 +23,7 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <utils/Vector.h>
+#include <camera/StringUtils.h>
 
 #include "api1/Camera2Client.h"
 #include "api1/client2/CaptureSequencer.h"
@@ -174,19 +175,19 @@
 
 
 void CaptureSequencer::dump(int fd, const Vector<String16>& /*args*/) {
-    String8 result;
+    std::string result;
     if (mCaptureRequest.entryCount() != 0) {
         result = "    Capture request:\n";
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         mCaptureRequest.dump(fd, 2, 6);
     } else {
         result = "    Capture request: undefined\n";
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
     }
-    result = String8::format("    Current capture state: %s\n",
+    result = fmt::sprintf("    Current capture state: %s\n",
             kStateNames[mCaptureState]);
-    result.append("    Latest captured frame:\n");
-    write(fd, result.string(), result.size());
+    result += "    Latest captured frame:\n";
+    write(fd, result.c_str(), result.size());
     mNewFrame.dump(fd, 2, 6);
 }
 
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 467108d..eb00bf8 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -83,7 +83,7 @@
     }
 
     // Find out buffer size for JPEG
-    ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(String8("")),
+    ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(""),
             params.pictureWidth, params.pictureHeight);
     if (maxJpegSize <= 0) {
         ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
@@ -157,7 +157,7 @@
                 params.pictureWidth, params.pictureHeight,
                 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
-                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+                std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for capture: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 2d3597c..ff71e6b 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -31,6 +31,7 @@
 #include <gui/BufferItem.h>
 #include <gui/Surface.h>
 #include <media/hardware/HardwareAPI.h>
+#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -198,7 +199,7 @@
         res = device->createStream(mPreviewWindow,
                 params.previewWidth, params.previewHeight,
                 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN,
-                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8(),
+                CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, std::string(),
                 std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
@@ -385,7 +386,7 @@
                 params.videoWidth, params.videoHeight,
                 params.videoFormat, params.videoDataSpace,
                 CAMERA_STREAM_ROTATION_0, &mRecordingStreamId,
-                String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+                std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
                     "%s (%d)", __FUNCTION__, mId,
@@ -585,21 +586,21 @@
 }
 
 status_t StreamingProcessor::dump(int fd, const Vector<String16>& /*args*/) {
-    String8 result;
+    std::string result;
 
-    result.append("  Current requests:\n");
+    result += "  Current requests:\n";
     if (mPreviewRequest.entryCount() != 0) {
-        result.append("    Preview request:\n");
-        write(fd, result.string(), result.size());
+        result += "    Preview request:\n";
+        write(fd, result.c_str(), result.size());
         mPreviewRequest.dump(fd, 2, 6);
         result.clear();
     } else {
-        result.append("    Preview request: undefined\n");
+        result += "    Preview request: undefined\n";
     }
 
     if (mRecordingRequest.entryCount() != 0) {
         result = "    Recording request:\n";
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         mRecordingRequest.dump(fd, 2, 6);
         result.clear();
     } else {
@@ -609,11 +610,11 @@
     const char* streamTypeString[] = {
         "none", "preview", "record"
     };
-    result.append(String8::format("   Active request: %s (paused: %s)\n",
-                                  streamTypeString[mActiveRequest],
-                                  mPaused ? "yes" : "no"));
+    result += fmt::sprintf("   Active request: %s (paused: %s\n",
+            streamTypeString[mActiveRequest],
+            mPaused ? "yes" : "no");
 
-    write(fd, result.string(), result.size());
+    write(fd, result.c_str(), result.size());
 
     return OK;
 }
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 1321e6b..d6c2415 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -30,6 +30,7 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <gui/Surface.h>
+#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -255,13 +256,13 @@
         BufferQueue::createBufferQueue(&producer, &consumer);
         mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
             mBufferQueueDepth);
-        mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
+        mProducer->setName("Camera2-ZslRingBufferConsumer");
         sp<Surface> outSurface = new Surface(producer);
 
         res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
             params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
             HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
-            String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+            std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create ZSL stream: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -680,12 +681,12 @@
 void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
     Mutex::Autolock l(mInputMutex);
     if (!mLatestCapturedRequest.isEmpty()) {
-        String8 result("    Latest ZSL capture request:\n");
-        write(fd, result.string(), result.size());
+        std::string result = "    Latest ZSL capture request:\n";
+        write(fd, result.c_str(), result.size());
         mLatestCapturedRequest.dump(fd, 2, 6);
     } else {
-        String8 result("    Latest ZSL capture request: none yet\n");
-        write(fd, result.string(), result.size());
+        std::string result = "    Latest ZSL capture request: none yet\n";
+        write(fd, result.c_str(), result.size());
     }
     dumpZslQueue(fd);
 }
@@ -706,12 +707,12 @@
 }
 
 void ZslProcessor::dumpZslQueue(int fd) const {
-    String8 header("ZSL queue contents:");
-    String8 indent("    ");
-    ALOGV("%s", header.string());
+    std::string header = "ZSL queue contents:";
+    std::string indent = "    ";
+    ALOGV("%s", header.c_str());
     if (fd != -1) {
         header = indent + header + "\n";
-        write(fd, header.string(), header.size());
+        write(fd, header.c_str(), header.size());
     }
     for (size_t i = 0; i < mZslQueue.size(); i++) {
         const ZslPair &queueEntry = mZslQueue[i];
@@ -725,13 +726,13 @@
             entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
             if (entry.count > 0) frameAeState = entry.data.u8[0];
         }
-        String8 result =
-                String8::format("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
+        std::string result =
+                fmt::sprintf("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                         bufferTimestamp, frameTimestamp, frameAeState);
-        ALOGV("%s", result.string());
+        ALOGV("%s", result.c_str());
         if (fd != -1) {
             result = indent + result + "\n";
-            write(fd, result.string(), result.size());
+            write(fd, result.c_str(), result.size());
         }
 
     }
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 1720b55..939f969 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -26,6 +26,7 @@
 #include <gui/Surface.h>
 #include <camera/camera2/CaptureRequest.h>
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "device3/Camera3Device.h"
@@ -42,12 +43,12 @@
 
 #define STATUS_ERROR(errorCode, errorString) \
     binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+            fmt::sprintf("%s:%d: %s", __FUNCTION__, __LINE__, errorString).c_str())
 
 #define STATUS_ERROR_FMT(errorCode, errorString, ...) \
     binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
-                    __VA_ARGS__))
+            fmt::sprintf("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__).c_str())
 
 namespace android {
 using namespace camera2;
@@ -57,10 +58,10 @@
 CameraDeviceClientBase::CameraDeviceClientBase(
         const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
-        const String16& clientPackageName,
+        const std::string& clientPackageName,
         bool systemNativeClient,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraId,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraId,
         [[maybe_unused]] int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
@@ -88,10 +89,10 @@
 CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
         const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-        const String16& clientPackageName,
+        const std::string& clientPackageName,
         bool systemNativeClient,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraId,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraId,
         int cameraFacing,
         int sensorOrientation,
         int clientPid,
@@ -99,7 +100,7 @@
         int servicePid,
         bool overrideForPerfClass,
         bool overrideToPortrait,
-        const String8& originalCameraId) :
+        const std::string& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
@@ -110,16 +111,17 @@
     mOverrideForPerfClass(overrideForPerfClass),
     mOriginalCameraId(originalCameraId) {
     ATRACE_CALL();
-    ALOGI("CameraDeviceClient %s: Opened", cameraId.string());
+    ALOGI("CameraDeviceClient %s: Opened", cameraId.c_str());
 }
 
 status_t CameraDeviceClient::initialize(sp<CameraProviderManager> manager,
-        const String8& monitorTags) {
+        const std::string& monitorTags) {
     return initializeImpl(manager, monitorTags);
 }
 
 template<typename TProviderPtr>
-status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags) {
+status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr,
+        const std::string& monitorTags) {
     ATRACE_CALL();
     status_t res;
 
@@ -128,10 +130,9 @@
         return res;
     }
 
-    String8 threadName;
     mFrameProcessor = new FrameProcessorBase(mDevice);
-    threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
-    res = mFrameProcessor->run(threadName.string());
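+    // Thread name becomes "CDU-<cameraId>-FrameProc", composed with plain std::string concatenation.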
+    std::string threadName = std::string("CDU-") + mCameraIdStr + "-FrameProc";
+    res = mFrameProcessor->run(threadName.c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start frame processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -189,13 +190,13 @@
     mProviderManager = providerPtr;
     // Cache physical camera ids corresponding to this device and also the high
     // resolution sensors in this device + physical camera ids
-    mProviderManager->isLogicalCamera(mCameraIdStr.string(), &mPhysicalCameraIds);
+    mProviderManager->isLogicalCamera(mCameraIdStr, &mPhysicalCameraIds);
     if (supportsUltraHighResolutionCapture(mCameraIdStr)) {
-        mHighResolutionSensors.insert(mCameraIdStr.string());
+        mHighResolutionSensors.insert(mCameraIdStr);
     }
     for (auto &physicalId : mPhysicalCameraIds) {
-        if (supportsUltraHighResolutionCapture(String8(physicalId.c_str()))) {
-            mHighResolutionSensors.insert(physicalId.c_str());
+        if (supportsUltraHighResolutionCapture(physicalId)) {
+            mHighResolutionSensors.insert(physicalId);
         }
     }
     return OK;
@@ -223,7 +224,7 @@
     if (idx == NAME_NOT_FOUND) {
         ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
                 " we have not called createStream on",
-                __FUNCTION__, mCameraIdStr.string());
+                __FUNCTION__, mCameraIdStr.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Request targets Surface that is not part of current capture session");
     } else if ((compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp)))
@@ -240,7 +241,7 @@
     (*outSurfaceMap)[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
 
     ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
-            __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
+            __FUNCTION__, mCameraIdStr.c_str(), streamSurfaceId.streamId(),
             streamSurfaceId.surfaceId());
 
     if (currentStreamId != nullptr) {
@@ -283,7 +284,7 @@
 
     if (requests.empty()) {
         ALOGE("%s: Camera %s: Sent null request. Rejecting request.",
-              __FUNCTION__, mCameraIdStr.string());
+              __FUNCTION__, mCameraIdStr.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty request list");
     }
 
@@ -296,19 +297,19 @@
         if (request.mIsReprocess) {
             if (!mInputStream.configured) {
                 ALOGE("%s: Camera %s: no input stream is configured.", __FUNCTION__,
-                        mCameraIdStr.string());
+                        mCameraIdStr.c_str());
                 return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "No input configured for camera %s but request is for reprocessing",
-                        mCameraIdStr.string());
+                        mCameraIdStr.c_str());
             } else if (streaming) {
                 ALOGE("%s: Camera %s: streaming reprocess requests not supported.", __FUNCTION__,
-                        mCameraIdStr.string());
+                        mCameraIdStr.c_str());
                 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Repeating reprocess requests not supported");
             } else if (request.mPhysicalCameraSettings.size() > 1) {
                 ALOGE("%s: Camera %s: reprocess requests not supported for "
                         "multiple physical cameras.", __FUNCTION__,
-                        mCameraIdStr.string());
+                        mCameraIdStr.c_str());
                 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Reprocess requests not supported for multiple cameras");
             }
@@ -316,23 +317,23 @@
 
         if (request.mPhysicalCameraSettings.empty()) {
             ALOGE("%s: Camera %s: request doesn't contain any settings.", __FUNCTION__,
-                    mCameraIdStr.string());
+                    mCameraIdStr.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Request doesn't contain any settings");
         }
 
         //The first capture settings should always match the logical camera id
-        String8 logicalId(request.mPhysicalCameraSettings.begin()->id.c_str());
+        const std::string &logicalId = request.mPhysicalCameraSettings.begin()->id;
         if (mDevice->getId() != logicalId && mOriginalCameraId != logicalId) {
             ALOGE("%s: Camera %s: Invalid camera request settings.", __FUNCTION__,
-                    mCameraIdStr.string());
+                    mCameraIdStr.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Invalid camera request settings");
         }
 
         if (request.mSurfaceList.isEmpty() && request.mStreamIdxList.size() == 0) {
             ALOGE("%s: Camera %s: Requests must have at least one surface target. "
-                    "Rejecting request.", __FUNCTION__, mCameraIdStr.string());
+                    "Rejecting request.", __FUNCTION__, mCameraIdStr.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Request has no output targets");
         }
@@ -358,9 +359,9 @@
 
                 ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
                 if (index >= 0) {
-                    String8 requestedPhysicalId(
-                            mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
-                    requestedPhysicalIds.push_back(requestedPhysicalId.string());
+                    const std::string &requestedPhysicalId =
+                            mConfiguredOutputs.valueAt(index).getPhysicalCameraId();
+                    requestedPhysicalIds.push_back(requestedPhysicalId);
                     dynamicProfileBitmap |=
                             mConfiguredOutputs.valueAt(index).getDynamicRangeProfile();
                 } else {
@@ -376,7 +377,7 @@
                 if (index < 0) {
                     ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
                             " we have not called createStream on: stream %d",
-                            __FUNCTION__, mCameraIdStr.string(), streamId);
+                            __FUNCTION__, mCameraIdStr.c_str(), streamId);
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Request targets Surface that is not part of current capture session");
                 }
@@ -385,7 +386,7 @@
                 if ((size_t)surfaceIdx >= gbps.size()) {
                     ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
                             " we have not called createStream on: stream %d, surfaceIdx %d",
-                            __FUNCTION__, mCameraIdStr.string(), streamId, surfaceIdx);
+                            __FUNCTION__, mCameraIdStr.c_str(), streamId, surfaceIdx);
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Request targets Surface has invalid surface index");
                 }
@@ -395,9 +396,9 @@
                     return res;
                 }
 
-                String8 requestedPhysicalId(
-                        mConfiguredOutputs.valueAt(index).getPhysicalCameraId());
-                requestedPhysicalIds.push_back(requestedPhysicalId.string());
+                const std::string &requestedPhysicalId =
+                        mConfiguredOutputs.valueAt(index).getPhysicalCameraId();
+                requestedPhysicalIds.push_back(requestedPhysicalId);
                 dynamicProfileBitmap |=
                         mConfiguredOutputs.valueAt(index).getDynamicRangeProfile();
             }
@@ -419,7 +420,7 @@
                     } else {
                         ALOGE("%s: Camera %s: Tried to submit a request with a surfaces that"
                                 " reference an unsupported dynamic range profile combination"
-                                " 0x%" PRIx64 "!", __FUNCTION__, mCameraIdStr.string(),
+                                " 0x%" PRIx64 "!", __FUNCTION__, mCameraIdStr.c_str(),
                                 dynamicProfileBitmap);
                         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                 "Request targets an unsupported dynamic range profile"
@@ -428,7 +429,7 @@
                 } else {
                     ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
                             " references unsupported dynamic range profile 0x%x!",
-                            __FUNCTION__, mCameraIdStr.string(), i);
+                            __FUNCTION__, mCameraIdStr.c_str(), i);
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Request targets 10-bit Surface with unsupported dynamic range"
                             " profile");
@@ -438,11 +439,10 @@
 
         CameraDeviceBase::PhysicalCameraSettingsList physicalSettingsList;
         for (const auto& it : request.mPhysicalCameraSettings) {
-            std::string resolvedId = (
-                mOriginalCameraId.string() == it.id) ? mDevice->getId().string() : it.id;
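+            // Settings addressed to the original camera id are resolved to the device's current id.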
+            const std::string resolvedId = (mOriginalCameraId == it.id) ? mDevice->getId() : it.id;
             if (it.settings.isEmpty()) {
                 ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
-                        __FUNCTION__, mCameraIdStr.string());
+                        __FUNCTION__, mCameraIdStr.c_str());
                 return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Request settings are empty");
             }
@@ -465,7 +465,7 @@
                 }
             }
 
-            String8 physicalId(resolvedId.c_str());
+            const std::string &physicalId = resolvedId;
             bool hasTestPatternModePhysicalKey = std::find(mSupportedPhysicalRequestKeys.begin(),
                     mSupportedPhysicalRequestKeys.end(), ANDROID_SENSOR_TEST_PATTERN_MODE) !=
                     mSupportedPhysicalRequestKeys.end();
@@ -477,7 +477,7 @@
                         resolvedId);
                 if (found == requestedPhysicalIds.end()) {
                     ALOGE("%s: Camera %s: Physical camera id: %s not part of attached outputs.",
-                            __FUNCTION__, mCameraIdStr.string(), physicalId.string());
+                            __FUNCTION__, mCameraIdStr.c_str(), physicalId.c_str());
                     return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                             "Invalid physical camera id");
                 }
@@ -523,7 +523,7 @@
                 &(submitInfo->mRequestId), /*size*/1);
         loopCounter++; // loopCounter starts from 1
         ALOGV("%s: Camera %s: Creating request with ID %d (%d of %zu)",
-                __FUNCTION__, mCameraIdStr.string(), submitInfo->mRequestId,
+                __FUNCTION__, mCameraIdStr.c_str(), submitInfo->mRequestId,
                 loopCounter, requests.size());
 
         metadataRequestList.push_back(physicalSettingsList);
@@ -546,12 +546,12 @@
         err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
                 &(submitInfo->mLastFrameNumber));
         if (err != OK) {
-            String8 msg = String8::format(
+            std::string msg = fmt::sprintf(
                 "Camera %s:  Got error %s (%d) after trying to set streaming request",
-                mCameraIdStr.string(), strerror(-err), err);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
+                mCameraIdStr.c_str(), strerror(-err), err);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
-                    msg.string());
+                    msg.c_str());
         } else {
             Mutex::Autolock idLock(mStreamingRequestIdLock);
             mStreamingRequestId = submitInfo->mRequestId;
@@ -560,17 +560,17 @@
         err = mDevice->captureList(metadataRequestList, surfaceMapList,
                 &(submitInfo->mLastFrameNumber));
         if (err != OK) {
-            String8 msg = String8::format(
+            std::string msg = fmt::sprintf(
                 "Camera %s: Got error %s (%d) after trying to submit capture request",
-                mCameraIdStr.string(), strerror(-err), err);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
+                mCameraIdStr.c_str(), strerror(-err), err);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
-                    msg.string());
+                    msg.c_str());
         }
         ALOGV("%s: requestId = %d ", __FUNCTION__, submitInfo->mRequestId);
     }
 
-    ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.string());
+    ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.c_str());
     return res;
 }
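
For reference, the hunks in this function show the core migration pattern: String8::format is replaced by fmt::sprintf, which takes the same printf-style format strings but returns std::string, so readers switch from .string() to .c_str(). A minimal standalone sketch of that pattern; the helper name and includes are illustrative, not part of this change:

    // Illustrative sketch only; mirrors the error-message pattern above.
    #include <cstring>        // strerror
    #include <string>
    #include <fmt/printf.h>   // fmt::sprintf: printf-style formatting that returns std::string

    static std::string formatCameraError(const std::string &cameraId, int err) {
        // Format strings stay printf-style; only the result type changes to std::string.
        return fmt::sprintf("Camera %s: Got error %s (%d) after trying to set streaming request",
                cameraId.c_str(), strerror(-err), err);
    }
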
 
@@ -594,22 +594,22 @@
 
     Mutex::Autolock idLock(mStreamingRequestIdLock);
     if (mStreamingRequestId != requestId) {
-        String8 msg = String8::format("Camera %s: Canceling request ID %d doesn't match "
-                "current request ID %d", mCameraIdStr.string(), requestId, mStreamingRequestId);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Canceling request ID %d doesn't match "
+                "current request ID %d", mCameraIdStr.c_str(), requestId, mStreamingRequestId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     err = mDevice->clearStreamingRequest(lastFrameNumber);
 
     if (err == OK) {
         ALOGV("%s: Camera %s: Successfully cleared streaming request",
-                __FUNCTION__, mCameraIdStr.string());
+                __FUNCTION__, mCameraIdStr.c_str());
         mStreamingRequestId = REQUEST_ID_NONE;
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error clearing streaming request: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
+                mCameraIdStr.c_str(), strerror(-err), err);
     }
 
     return res;
@@ -634,9 +634,9 @@
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
 
     if (offlineStreamIds == nullptr) {
-        String8 msg = String8::format("Invalid offline stream ids");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = "Invalid offline stream ids";
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     Mutex::Autolock icl(mBinderSerializationLock);
@@ -653,15 +653,15 @@
 
     status_t err = mDevice->configureStreams(sessionParams, operatingMode);
     if (err == BAD_VALUE) {
-        String8 msg = String8::format("Camera %s: Unsupported set of inputs/outputs provided",
-                mCameraIdStr.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Unsupported set of inputs/outputs provided",
+                mCameraIdStr.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     } else if (err != OK) {
-        String8 msg = String8::format("Camera %s: Error configuring streams: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Error configuring streams: %s (%d)",
+                mCameraIdStr.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     } else {
         offlineStreamIds->clear();
         mDevice->getOfflineStreamIds(offlineStreamIds);
@@ -670,10 +670,10 @@
         for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
             err = mCompositeStreamMap.valueAt(i)->configureStream();
             if (err != OK) {
-                String8 msg = String8::format("Camera %s: Error configuring composite "
-                        "streams: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
-                ALOGE("%s: %s", __FUNCTION__, msg.string());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+                std::string msg = fmt::sprintf("Camera %s: Error configuring composite "
+                        "streams: %s (%d)", mCameraIdStr.c_str(), strerror(-err), err);
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
                 break;
             }
 
@@ -732,35 +732,36 @@
     }
 
     if (status == nullptr) {
-        String8 msg = String8::format( "Camera %s: Invalid status!", mCameraIdStr.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf( "Camera %s: Invalid status!", mCameraIdStr.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     *status = false;
-    camera3::metadataGetter getMetadata = [this](const String8 &id, bool /*overrideForPerfClass*/) {
+    camera3::metadataGetter getMetadata = [this](const std::string &id,
+            bool /*overrideForPerfClass*/) {
           return mDevice->infoPhysical(id);};
-    ret = mProviderManager->isSessionConfigurationSupported(mCameraIdStr.string(),
+    ret = mProviderManager->isSessionConfigurationSupported(mCameraIdStr.c_str(),
             sessionConfiguration, mOverrideForPerfClass, getMetadata, status);
     switch (ret) {
         case OK:
             // Expected, do nothing.
             break;
         case INVALID_OPERATION: {
-                String8 msg = String8::format(
+                std::string msg = fmt::sprintf(
                         "Camera %s: Session configuration query not supported!",
-                        mCameraIdStr.string());
-                ALOGD("%s: %s", __FUNCTION__, msg.string());
-                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+                        mCameraIdStr.c_str());
+                ALOGD("%s: %s", __FUNCTION__, msg.c_str());
+                res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
             }
 
             break;
         default: {
-                String8 msg = String8::format( "Camera %s: Error: %s (%d)", mCameraIdStr.string(),
+                std::string msg = fmt::sprintf( "Camera %s: Error: %s (%d)", mCameraIdStr.c_str(),
                         strerror(-ret), ret);
-                ALOGE("%s: %s", __FUNCTION__, msg.string());
+                ALOGE("%s: %s", __FUNCTION__, msg.c_str());
                 res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                        msg.string());
+                        msg.c_str());
             }
     }
 
@@ -812,10 +813,10 @@
         }
 
         if (surfaces.empty() && dIndex == NAME_NOT_FOUND) {
-            String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no such"
-                    " stream created yet", mCameraIdStr.string(), streamId);
-            ALOGW("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            std::string msg = fmt::sprintf("Camera %s: Invalid stream ID (%d) specified, no such"
+                    " stream created yet", mCameraIdStr.c_str(), streamId);
+            ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
     }
 
@@ -823,10 +824,10 @@
     status_t err = mDevice->deleteStream(streamId);
 
     if (err != OK) {
-        String8 msg = String8::format("Camera %s: Unexpected error %s (%d) when deleting stream %d",
-                mCameraIdStr.string(), strerror(-err), err, streamId);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Unexpected error %s (%d) when deleting stream "
+                "%d", mCameraIdStr.c_str(), strerror(-err), err, streamId);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     } else {
         if (isInput) {
             mInputStream.configured = false;
@@ -846,11 +847,11 @@
                 status_t ret;
                 if ((ret = mCompositeStreamMap.valueAt(compositeIndex)->deleteStream())
                         != OK) {
-                    String8 msg = String8::format("Camera %s: Unexpected error %s (%d) when "
-                            "deleting composite stream %d", mCameraIdStr.string(), strerror(-err), err,
-                            streamId);
-                    ALOGE("%s: %s", __FUNCTION__, msg.string());
-                    res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+                    std::string msg = fmt::sprintf("Camera %s: Unexpected error %s (%d) when "
+                            "deleting composite stream %d", mCameraIdStr.c_str(), strerror(-err),
+                            err, streamId);
+                    ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                    res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
                 }
                 mCompositeStreamMap.removeItemsAt(compositeIndex);
             }
@@ -883,7 +884,7 @@
     size_t numBufferProducers = bufferProducers.size();
     bool deferredConsumer = outputConfiguration.isDeferred();
     bool isShared = outputConfiguration.isShared();
-    String8 physicalCameraId = String8(outputConfiguration.getPhysicalCameraId());
+    const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
     bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
     bool isMultiResolution = outputConfiguration.isMultiResolution();
     int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
@@ -926,10 +927,11 @@
         sp<IBinder> binder = IInterface::asBinder(bufferProducer);
         ssize_t index = mStreamMap.indexOfKey(binder);
         if (index != NAME_NOT_FOUND) {
-            String8 msg = String8::format("Camera %s: Surface already has a stream created for it "
-                    "(ID %zd)", mCameraIdStr.string(), index);
-            ALOGW("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
+            std::string msg = std::string("Camera ") + mCameraIdStr
+                    + ": Surface already has a stream created for it (ID "
+                    + std::to_string(index) + ")";
+            ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
         }
 
         sp<Surface> surface;
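
The duplicate-surface message above is one place that moves to plain std::string concatenation (with std::to_string for the %zd index) rather than fmt::sprintf. The same construction in isolation, with a hypothetical function name:

    #include <string>
    #include <sys/types.h>   // ssize_t

    // Illustrative sketch only; the helper name is hypothetical.
    static std::string duplicateSurfaceMessage(const std::string &cameraId, ssize_t index) {
        return std::string("Camera ") + cameraId
                + ": Surface already has a stream created for it (ID "
                + std::to_string(index) + ")";
    }
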
@@ -997,7 +999,7 @@
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
-                mCameraIdStr.string(), streamInfo.width, streamInfo.height, streamInfo.format,
+                mCameraIdStr.c_str(), streamInfo.width, streamInfo.height, streamInfo.format,
                 streamInfo.dataSpace, strerror(-err), err);
     } else {
         int i = 0;
@@ -1013,20 +1015,19 @@
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for output surface"
                     " (%d x %d) with format 0x%x.",
-                  __FUNCTION__, mCameraIdStr.string(), streamId, streamInfo.width,
+                  __FUNCTION__, mCameraIdStr.c_str(), streamId, streamInfo.width,
                   streamInfo.height, streamInfo.format);
 
         // Set transform flags to ensure preview to be rotated correctly.
         res = setStreamTransformLocked(streamId, streamInfo.mirrorMode);
 
         // Fill in mHighResolutionCameraIdToStreamIdSet map
-        const String8 &cameraIdUsed =
+        const std::string &cameraIdUsed =
                 physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
-        const char *cameraIdUsedCStr = cameraIdUsed.string();
         // Only needed for high resolution sensors
-        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+        if (mHighResolutionSensors.find(cameraIdUsed) !=
                 mHighResolutionSensors.end()) {
-            mHighResolutionCameraIdToStreamIdSet[cameraIdUsedCStr].insert(streamId);
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed].insert(streamId);
         }
 
         *newStreamId = streamId;
@@ -1066,8 +1067,8 @@
     int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
     std::vector<sp<Surface>> noSurface;
     std::vector<int> surfaceIds;
-    String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());
-    const String8 &cameraIdUsed =
+    const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
+    const std::string &cameraIdUsed =
             physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
     // Here, we override sensor pixel modes
     std::unordered_set<int32_t> overriddenSensorPixelModesUsed;
@@ -1096,7 +1097,7 @@
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
-                mCameraIdStr.string(), width, height, format, dataSpace, strerror(-err), err);
+                mCameraIdStr.c_str(), width, height, format, dataSpace, strerror(-err), err);
     } else {
         // Can not add streamId to mStreamMap here, as the surface is deferred. Add it to
         // a separate list to track. Once the deferred surface is set, this id will be
@@ -1113,18 +1114,17 @@
 
         ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                 " (%d x %d) stream with format 0x%x.",
-              __FUNCTION__, mCameraIdStr.string(), streamId, width, height, format);
+              __FUNCTION__, mCameraIdStr.c_str(), streamId, width, height, format);
 
         // Set transform flags to ensure preview to be rotated correctly.
         res = setStreamTransformLocked(streamId, outputConfiguration.getMirrorMode());
 
         *newStreamId = streamId;
         // Fill in mHighResolutionCameraIdToStreamIdSet
-        const char *cameraIdUsedCStr = cameraIdUsed.string();
         // Only needed for high resolution sensors
-        if (mHighResolutionSensors.find(cameraIdUsedCStr) !=
+        if (mHighResolutionSensors.find(cameraIdUsed) !=
                 mHighResolutionSensors.end()) {
-            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed.string()].insert(streamId);
+            mHighResolutionCameraIdToStreamIdSet[cameraIdUsed].insert(streamId);
         }
     }
     return res;
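
The two hunks above can drop the intermediate cameraIdUsedCStr because the high-resolution bookkeeping is keyed by std::string, so the camera id is usable directly. An isolated sketch under that assumption (the members' real declarations live in the header and are not shown in this diff):

    #include <cstdint>
    #include <string>
    #include <unordered_map>
    #include <unordered_set>

    // Hypothetical stand-ins for mHighResolutionSensors / mHighResolutionCameraIdToStreamIdSet.
    static std::unordered_set<std::string> sHighResolutionSensors;
    static std::unordered_map<std::string, std::unordered_set<int32_t>> sHighResIdToStreamIds;

    static void trackHighResolutionStream(const std::string &cameraIdUsed, int32_t streamId) {
        // With std::string keys there is no .string()/.c_str() bridge to maintain.
        if (sHighResolutionSensors.find(cameraIdUsed) != sHighResolutionSensors.end()) {
            sHighResIdToStreamIds[cameraIdUsed].insert(streamId);
        }
    }
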
@@ -1148,10 +1148,10 @@
 
     err = mDevice->setStreamTransform(streamId, transform);
     if (err != OK) {
-        String8 msg = String8::format("Failed to set stream transform (stream id %d)",
+        std::string msg = fmt::sprintf("Failed to set stream transform (stream id %d)",
                 streamId);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
 
     return res;
@@ -1176,10 +1176,10 @@
     }
 
     if (mInputStream.configured) {
-        String8 msg = String8::format("Camera %s: Already has an input stream "
-                "configured (ID %d)", mCameraIdStr.string(), mInputStream.id);
-        ALOGE("%s: %s", __FUNCTION__, msg.string() );
-        return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Already has an input stream "
+                "configured (ID %d)", mCameraIdStr.c_str(), mInputStream.id);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str() );
+        return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
     }
 
     int streamId = -1;
@@ -1192,12 +1192,12 @@
         mInputStream.id = streamId;
 
         ALOGV("%s: Camera %s: Successfully created a new input stream ID %d",
-                __FUNCTION__, mCameraIdStr.string(), streamId);
+                __FUNCTION__, mCameraIdStr.c_str(), streamId);
 
         *newStreamId = streamId;
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %s: Error creating new input stream: %s (%d)", mCameraIdStr.string(),
+                "Camera %s: Error creating new input stream: %s (%d)", mCameraIdStr.c_str(),
                 strerror(-err), err);
     }
 
@@ -1222,9 +1222,9 @@
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error getting input Surface: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
+                mCameraIdStr.c_str(), strerror(-err), err);
     } else {
-        inputSurface->name = String16("CameraInput");
+        inputSurface->name = toString16("CameraInput");
         inputSurface->graphicBufferProducer = producer;
     }
     return res;
@@ -1245,7 +1245,7 @@
 
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
-    String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());
+    const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
 
     auto producerCount = bufferProducers.size();
     if (producerCount == 0) {
@@ -1332,12 +1332,12 @@
             case -EBUSY:
                 res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                         "Camera %s: Error updating stream: %s (%d)",
-                        mCameraIdStr.string(), strerror(ret), ret);
+                        mCameraIdStr.c_str(), strerror(ret), ret);
                 break;
             default:
                 res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                         "Camera %s: Error updating stream: %s (%d)",
-                        mCameraIdStr.string(), strerror(ret), ret);
+                        mCameraIdStr.c_str(), strerror(ret), ret);
                 break;
         }
     } else {
@@ -1353,7 +1353,7 @@
         mConfiguredOutputs.replaceValueFor(streamId, outputConfiguration);
 
         ALOGV("%s: Camera %s: Successful stream ID %d update",
-                  __FUNCTION__, mCameraIdStr.string(), streamId);
+                  __FUNCTION__, mCameraIdStr.c_str(), streamId);
     }
 
     return res;
@@ -1388,12 +1388,12 @@
     } else if (err == BAD_VALUE) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Template ID %d is invalid or not supported: %s (%d)",
-                mCameraIdStr.string(), templateId, strerror(-err), err);
+                mCameraIdStr.c_str(), templateId, strerror(-err), err);
 
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error creating default request for template %d: %s (%d)",
-                mCameraIdStr.string(), templateId, strerror(-err), err);
+                mCameraIdStr.c_str(), templateId, strerror(-err), err);
     }
     return res;
 }
@@ -1440,17 +1440,17 @@
     // FIXME: Also need check repeating burst.
     Mutex::Autolock idLock(mStreamingRequestIdLock);
     if (mStreamingRequestId != REQUEST_ID_NONE) {
-        String8 msg = String8::format(
+        std::string msg = fmt::sprintf(
             "Camera %s: Try to waitUntilIdle when there are active streaming requests",
-            mCameraIdStr.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+            mCameraIdStr.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
     status_t err = mDevice->waitUntilDrained();
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error waiting to drain: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
+                mCameraIdStr.c_str(), strerror(-err), err);
     }
     ALOGV("%s Done", __FUNCTION__);
     return res;
@@ -1476,7 +1476,8 @@
     status_t err = mDevice->flush(lastFrameNumber);
     if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %s: Error flushing device: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
+                "Camera %s: Error flushing device: %s (%d)", mCameraIdStr.c_str(), strerror(-err),
+                err);
     }
     return res;
 }
@@ -1500,10 +1501,10 @@
     }
 
     if (index == NAME_NOT_FOUND) {
-        String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
-              "with that ID exists", mCameraIdStr.string(), streamId);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Invalid stream ID (%d) specified, no stream "
+              "with that ID exists", mCameraIdStr.c_str(), streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     // Also returns BAD_VALUE if stream ID was not valid, or stream already
@@ -1512,10 +1513,10 @@
     if (err == BAD_VALUE) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Stream %d has already been used, and cannot be prepared",
-                mCameraIdStr.string(), streamId);
+                mCameraIdStr.c_str(), streamId);
     } else if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.string(), streamId,
+                "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.c_str(), streamId,
                 strerror(-err), err);
     }
     return res;
@@ -1540,17 +1541,17 @@
     }
 
     if (index == NAME_NOT_FOUND) {
-        String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
-              "with that ID exists", mCameraIdStr.string(), streamId);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Invalid stream ID (%d) specified, no stream "
+              "with that ID exists", mCameraIdStr.c_str(), streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     if (maxCount <= 0) {
-        String8 msg = String8::format("Camera %s: maxCount (%d) must be greater than 0",
-                mCameraIdStr.string(), maxCount);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: maxCount (%d) must be greater than 0",
+                mCameraIdStr.c_str(), maxCount);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     // Also returns BAD_VALUE if stream ID was not valid, or stream already
@@ -1559,10 +1560,10 @@
     if (err == BAD_VALUE) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Stream %d has already been used, and cannot be prepared",
-                mCameraIdStr.string(), streamId);
+                mCameraIdStr.c_str(), streamId);
     } else if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.string(), streamId,
+                "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.c_str(), streamId,
                 strerror(-err), err);
     }
 
@@ -1588,10 +1589,10 @@
     }
 
     if (index == NAME_NOT_FOUND) {
-        String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
-              "with that ID exists", mCameraIdStr.string(), streamId);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Invalid stream ID (%d) specified, no stream "
+              "with that ID exists", mCameraIdStr.c_str(), streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     // Also returns BAD_VALUE if stream ID was not valid or if the stream is in
@@ -1600,10 +1601,10 @@
     if (err == BAD_VALUE) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Stream %d is still in use, cannot be torn down",
-                mCameraIdStr.string(), streamId);
+                mCameraIdStr.c_str(), streamId);
     } else if (err != OK) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
-                "Camera %s: Error tearing down stream %d: %s (%d)", mCameraIdStr.string(), streamId,
+                "Camera %s: Error tearing down stream %d: %s (%d)", mCameraIdStr.c_str(), streamId,
                 strerror(-err), err);
     }
 
@@ -1621,7 +1622,7 @@
 
     const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
             outputConfiguration.getGraphicBufferProducers();
-    String8 physicalId(outputConfiguration.getPhysicalCameraId());
+    const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
 
     if (bufferProducers.size() == 0) {
         ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
@@ -1646,17 +1647,17 @@
 
     }
     if (deferredStreamIndex == NAME_NOT_FOUND && !streamIdConfigured) {
-        String8 msg = String8::format("Camera %s: deferred surface is set to a unknown stream"
-                "(ID %d)", mCameraIdStr.string(), streamId);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: deferred surface is set to a unknown stream"
+                "(ID %d)", mCameraIdStr.c_str(), streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     if (mStreamInfoMap[streamId].finalized) {
-        String8 msg = String8::format("Camera %s: finalizeOutputConfigurations has been called"
-                " on stream ID %d", mCameraIdStr.string(), streamId);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: finalizeOutputConfigurations has been called"
+                " on stream ID %d", mCameraIdStr.c_str(), streamId);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     if (!mDevice.get()) {
@@ -1676,7 +1677,7 @@
         ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
         if (index != NAME_NOT_FOUND) {
             ALOGV("Camera %s: Surface already has a stream created "
-                    " for it (ID %zd)", mCameraIdStr.string(), index);
+                    " for it (ID %zd)", mCameraIdStr.c_str(), index);
             continue;
         }
 
@@ -1719,11 +1720,11 @@
     } else if (err == NO_INIT) {
         res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Deferred surface is invalid: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
+                mCameraIdStr.c_str(), strerror(-err), err);
     } else {
         res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
                 "Camera %s: Error setting output stream deferred surface: %s (%d)",
-                mCameraIdStr.string(), strerror(-err), err);
+                mCameraIdStr.c_str(), strerror(-err), err);
     }
 
     return res;
@@ -1735,10 +1736,10 @@
     if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
 
     if (!isValidAudioRestriction(mode)) {
-        String8 msg = String8::format("Camera %s: invalid audio restriction mode %d",
-                mCameraIdStr.string(), mode);
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: invalid audio restriction mode %d",
+                mCameraIdStr.c_str(), mode);
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     Mutex::Autolock icl(mBinderSerializationLock);
@@ -1817,15 +1818,15 @@
     }
 
     if (offlineOutputIds.empty()) {
-        String8 msg = String8::format("Offline surfaces must not be empty");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = "Offline surfaces must not be empty";
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     if (session == nullptr) {
-        String8 msg = String8::format("Invalid offline session");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = "Invalid offline session";
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     std::vector<int32_t> offlineStreamIds;
@@ -1834,17 +1835,17 @@
     for (const auto& streamId : offlineOutputIds) {
         ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
         if (index == NAME_NOT_FOUND) {
-            String8 msg = String8::format("Offline surface with id: %d is not registered",
+            std::string msg = fmt::sprintf("Offline surface with id: %d is not registered",
                     streamId);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
 
         if (!mStreamInfoMap[streamId].supportsOffline) {
-            String8 msg = String8::format("Offline surface with id: %d doesn't support "
+            std::string msg = fmt::sprintf("Offline surface with id: %d doesn't support "
                     "offline mode", streamId);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
 
         Mutex::Autolock l(mCompositeLock);
@@ -1881,7 +1882,7 @@
     if (ret != OK) {
         return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                 "Camera %s: Error switching to offline mode: %s (%d)",
-                mCameraIdStr.string(), strerror(ret), ret);
+                mCameraIdStr.c_str(), strerror(ret), ret);
     }
 
     sp<CameraOfflineSessionClient> offlineClient;
@@ -1906,7 +1907,7 @@
     } else {
         // In case we failed to register the offline client, ensure that it still initialized
         // so that all failing requests can return back correctly once the object is released.
-        offlineClient->initialize(nullptr /*cameraProviderManager*/, String8()/*monitorTags*/);
+        offlineClient->initialize(nullptr /*cameraProviderManager*/, std::string()/*monitorTags*/);
 
         switch(ret) {
             case BAD_VALUE:
@@ -1933,7 +1934,7 @@
 
 status_t CameraDeviceClient::dumpClient(int fd, const Vector<String16>& args) {
     dprintf(fd, "  CameraDeviceClient[%s] (%p) dump:\n",
-            mCameraIdStr.string(),
+            mCameraIdStr.c_str(),
             (getRemoteCallback() != NULL ?
                     IInterface::asBinder(getRemoteCallback()).get() : NULL) );
     dprintf(fd, "    Current client UID %u\n", mClientUid);
@@ -1966,7 +1967,7 @@
     return dumpDevice(fd, args);
 }
 
-status_t CameraDeviceClient::startWatchingTags(const String8 &tags, int out) {
+status_t CameraDeviceClient::startWatchingTags(const std::string &tags, int out) {
     sp<CameraDeviceBase> device = mDevice;
     if (!device) {
         dprintf(out, "  Device is detached.");
@@ -2090,16 +2091,16 @@
     if (mDevice == 0) return;
 
     nsecs_t startTime = systemTime();
-    ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
+    ALOGV("Camera %s: Stopping processors", mCameraIdStr.c_str());
 
     if (mFrameProcessor.get() != nullptr) {
         mFrameProcessor->removeListener(
                 camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
                 camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID, /*listener*/this);
         mFrameProcessor->requestExit();
-        ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());
+        ALOGV("Camera %s: Waiting for threads", mCameraIdStr.c_str());
         mFrameProcessor->join();
-        ALOGV("Camera %s: Disconnecting device", mCameraIdStr.string());
+        ALOGV("Camera %s: Disconnecting device", mCameraIdStr.c_str());
     }
 
     // WORKAROUND: HAL refuses to disconnect while there's streams in flight
@@ -2205,7 +2206,7 @@
     entry = metadata.find(ANDROID_LED_TRANSMIT);
     if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) {
         String16 permissionString =
-            String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
+            toString16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
         if (!checkCallingPermission(permissionString)) {
             const int uid = CameraThreadState::getCallingUid();
             ALOGE("Permission Denial: "
@@ -2231,7 +2232,7 @@
 
     if (tempId == nullptr) {
         ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                "Camera %s: Invalid template argument", mCameraIdStr.string());
+                "Camera %s: Invalid template argument", mCameraIdStr.c_str());
         return ret;
     }
     switch(templateId) {
@@ -2256,21 +2257,21 @@
         default:
             ret = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
                     "Camera %s: Template ID %d is invalid or not supported",
-                    mCameraIdStr.string(), templateId);
+                    mCameraIdStr.c_str(), templateId);
             return ret;
     }
 
     return ret;
 }
 
-const CameraMetadata &CameraDeviceClient::getStaticInfo(const String8 &cameraId) {
+const CameraMetadata &CameraDeviceClient::getStaticInfo(const std::string &cameraId) {
     if (mDevice->getId() == cameraId) {
         return mDevice->info();
     }
     return mDevice->infoPhysical(cameraId);
 }
 
-bool CameraDeviceClient::supportsUltraHighResolutionCapture(const String8 &cameraId) {
+bool CameraDeviceClient::supportsUltraHighResolutionCapture(const std::string &cameraId) {
     const CameraMetadata &deviceInfo = getStaticInfo(cameraId);
     return SessionConfigurationUtils::supportsUltraHighResolutionCapture(deviceInfo);
 }
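
The toString16() calls added in this file come from the newly included <camera/StringUtils.h>. A hypothetical shape for that helper, inferred purely from how it is called here (the real header may declare it differently):

    // Hypothetical sketch, not the actual <camera/StringUtils.h> contents.
    #include <string>
    #include <utils/String16.h>

    namespace android {
    // Bridges the service's internal std::string identifiers and package names to the
    // String16 type still expected at binder-facing APIs.
    inline String16 toString16(const std::string &s) {
        return String16(s.c_str());
    }
    } // namespace android
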
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 95563ee..86a94e2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -50,10 +50,10 @@
 protected:
     CameraDeviceClientBase(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
-            const String16& clientPackageName,
+            const std::string& clientPackageName,
             bool systemNativeClient,
-            const std::optional<String16>& clientFeatureId,
-            const String8& cameraId,
+            const std::optional<std::string>& clientFeatureId,
+            const std::string& cameraId,
             int api1CameraId,
             int cameraFacing,
             int sensorOrientation,
@@ -181,10 +181,10 @@
     CameraDeviceClient(const sp<CameraService>& cameraService,
             const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-            const String16& clientPackageName,
+            const std::string& clientPackageName,
             bool clientPackageOverride,
-            const std::optional<String16>& clientFeatureId,
-            const String8& cameraId,
+            const std::optional<std::string>& clientFeatureId,
+            const std::string& cameraId,
             int cameraFacing,
             int sensorOrientation,
             int clientPid,
@@ -192,11 +192,11 @@
             int servicePid,
             bool overrideForPerfClass,
             bool overrideToPortrait,
-            const String8& originalCameraId);
+            const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
     virtual status_t      initialize(sp<CameraProviderManager> manager,
-            const String8& monitorTags) override;
+            const std::string& monitorTags) override;
 
     virtual status_t      setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
@@ -212,7 +212,7 @@
 
     virtual status_t      dumpClient(int fd, const Vector<String16>& args);
 
-    virtual status_t      startWatchingTags(const String8 &tags, int out);
+    virtual status_t      startWatchingTags(const std::string &tags, int out);
     virtual status_t      stopWatchingTags(int out);
     virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out);
 
@@ -246,12 +246,12 @@
     // Calculate the ANativeWindow transform from android.sensor.orientation
     status_t              getRotationTransformLocked(int mirrorMode, /*out*/int32_t* transform);
 
-    bool supportsUltraHighResolutionCapture(const String8 &cameraId);
+    bool supportsUltraHighResolutionCapture(const std::string &cameraId);
 
     bool isSensorPixelModeConsistent(const std::list<int> &streamIdList,
             const CameraMetadata &settings);
 
-    const CameraMetadata &getStaticInfo(const String8 &cameraId);
+    const CameraMetadata &getStaticInfo(const std::string &cameraId);
 
 private:
     // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
@@ -290,7 +290,7 @@
     std::vector<int32_t> mSupportedPhysicalRequestKeys;
 
     template<typename TProviderPtr>
-    status_t      initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
+    status_t      initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags);
 
     /** Utility members */
     binder::Status checkPidStatus(const char* checkLocation);
@@ -371,7 +371,7 @@
     int mVideoStabilizationMode = -1;
 
     // This only exists in case of camera ID Remapping.
-    String8 mOriginalCameraId;
+    const std::string mOriginalCameraId;
 };
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
index 86a0ebc..99bdb0e 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.cpp
@@ -21,12 +21,13 @@
 #include "CameraOfflineSessionClient.h"
 #include "utils/CameraThreadState.h"
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 
 namespace android {
 
 using binder::Status;
 
-status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const String8&) {
+status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const std::string&) {
     ATRACE_CALL();
 
     if (mFrameProcessor.get() != nullptr) {
@@ -42,14 +43,13 @@
 
     if (mOfflineSession.get() == nullptr) {
         ALOGE("%s: Camera %s: No valid offline session",
-                __FUNCTION__, mCameraIdStr.string());
+                __FUNCTION__, mCameraIdStr.c_str());
         return NO_INIT;
     }
 
-    String8 threadName;
     mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
-    threadName = String8::format("Offline-%s-FrameProc", mCameraIdStr.string());
-    res = mFrameProcessor->run(threadName.string());
+    std::string threadName = fmt::sprintf("Offline-%s-FrameProc", mCameraIdStr.c_str());
+    res = mFrameProcessor->run(threadName.c_str());
     if (res != OK) {
         ALOGE("%s: Unable to start frame processor thread: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -65,7 +65,7 @@
     res = mOfflineSession->initialize(weakThis);
     if (res != OK) {
         ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
-                __FUNCTION__, mCameraIdStr.string(), strerror(-res), res);
+                __FUNCTION__, mCameraIdStr.c_str(), strerror(-res), res);
         return res;
     }
 
@@ -119,14 +119,14 @@
 }
 
 status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& args) {
-    String8 result;
+    std::string result;
 
     result = "  Offline session dump:\n";
-    write(fd, result.string(), result.size());
+    write(fd, result.c_str(), result.size());
 
     if (mOfflineSession.get() == nullptr) {
         result = "  *** Offline session is detached\n";
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         return NO_ERROR;
     }
 
@@ -134,15 +134,15 @@
 
     auto res = mOfflineSession->dump(fd);
     if (res != OK) {
-        result = String8::format("   Error dumping offline session: %s (%d)",
+        result = fmt::sprintf("   Error dumping offline session: %s (%d)",
                 strerror(-res), res);
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
     }
 
     return OK;
 }
 
-status_t CameraOfflineSessionClient::startWatchingTags(const String8 &tags, int outFd) {
+status_t CameraOfflineSessionClient::startWatchingTags(const std::string &tags, int outFd) {
     return BasicClient::startWatchingTags(tags, outFd);
 }
 
@@ -171,7 +171,7 @@
     mDisconnected = true;
 
     sCameraService->removeByClient(this);
-    sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, String8(mClientPackageName));
+    sCameraService->logDisconnectedOffline(mCameraIdStr, mClientPid, mClientPackageName);
 
     sp<IBinder> remote = getRemote();
     if (remote != nullptr) {
@@ -186,7 +186,7 @@
 
     finishCameraOps();
     ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
-            mCameraIdStr.string(), mClientPid);
+            mCameraIdStr.c_str(), mClientPid);
 
     // client shouldn't be able to call into us anymore
     mClientPid = 0;
@@ -231,7 +231,7 @@
     ATRACE_CALL();
     {
         ALOGV("%s: Start camera ops, package name = %s, client UID = %d",
-              __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+              __FUNCTION__, mClientPackageName.c_str(), mClientUid);
     }
 
     if (mAppOpsManager != nullptr) {
@@ -240,14 +240,14 @@
         int32_t res;
         // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
         mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
-                mClientPackageName, mOpsCallback);
+                toString16(mClientPackageName), mOpsCallback);
         // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
         res = mAppOpsManager->startOpNoThrow(AppOpsManager::OP_CAMERA,
-                mClientUid, mClientPackageName, /*startIfModeDefault*/ false);
+                mClientUid, toString16(mClientPackageName), /*startIfModeDefault*/ false);
 
         if (res == AppOpsManager::MODE_ERRORED) {
             ALOGI("Offline Camera %s: Access for \"%s\" has been revoked",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
+                    mCameraIdStr.c_str(), mClientPackageName.c_str());
             return PERMISSION_DENIED;
         }
 
@@ -255,7 +255,7 @@
         // return MODE_IGNORED. Do not treat such case as error.
         if (!mUidIsTrusted && res == AppOpsManager::MODE_IGNORED) {
             ALOGI("Offline Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.string(), String8(mClientPackageName).string());
+                    mCameraIdStr.c_str(), mClientPackageName.c_str());
             // Return the same error as for device policy manager rejection
             return -EACCES;
         }
@@ -278,7 +278,7 @@
         if (mAppOpsManager != nullptr) {
         // TODO : possibly change this to OP_OFFLINE_CAMERA_SESSION
             mAppOpsManager->finishOp(AppOpsManager::OP_CAMERA, mClientUid,
-                    mClientPackageName);
+                    toString16(mClientPackageName));
             mOpsActive = false;
         }
     }
@@ -368,10 +368,10 @@
                 CaptureResultExtras());
 }
 
-status_t CameraOfflineSessionClient::injectCamera(const String8& injectedCamId,
+status_t CameraOfflineSessionClient::injectCamera(const std::string& injectedCamId,
             sp<CameraProviderManager> manager) {
     ALOGV("%s: This client doesn't support the injection camera. injectedCamId: %s providerPtr: %p",
-            __FUNCTION__, injectedCamId.string(), manager.get());
+            __FUNCTION__, injectedCamId.c_str(), manager.get());
 
     return OK;
 }
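
In this file the client package name remains a std::string internally and is converted only at the AppOps boundary, which still takes String16 package names. A hedged sketch of that boundary pattern, mirroring the startOpNoThrow call above (helper and function names are illustrative):

    // Illustrative sketch only; mirrors the AppOps calls above.
    #include <string>
    #include <sys/types.h>            // uid_t
    #include <binder/AppOpsManager.h>
    #include <camera/StringUtils.h>   // toString16 (assumed helper, see earlier sketch)

    static bool startCameraOp(android::AppOpsManager &appOps, uid_t uid, const std::string &pkg) {
        // Keep std::string internally; convert once at the framework API boundary.
        int32_t res = appOps.startOpNoThrow(android::AppOpsManager::OP_CAMERA, uid,
                android::toString16(pkg), /*startIfModeDefault*/ false);
        return res == android::AppOpsManager::MODE_ALLOWED;
    }
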
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index ad763f9..70bad03 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -47,9 +47,9 @@
             sp<CameraOfflineSessionBase> session,
             const KeyedVector<sp<IBinder>, sp<CompositeStream>>& offlineCompositeStreamMap,
             const sp<ICameraDeviceCallbacks>& remoteCallback,
-            const String16& clientPackageName,
-            const std::optional<String16>& clientFeatureId,
-            const String8& cameraIdStr, int cameraFacing, int sensorOrientation,
+            const std::string& clientPackageName,
+            const std::optional<std::string>& clientFeatureId,
+            const std::string& cameraIdStr, int cameraFacing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid) :
             CameraService::BasicClient(
                     cameraService,
@@ -73,12 +73,12 @@
 
     status_t dumpClient(int /*fd*/, const Vector<String16>& /*args*/) override;
 
-    status_t startWatchingTags(const String8 &tags, int outFd) override;
+    status_t startWatchingTags(const std::string &tags, int outFd) override;
     status_t stopWatchingTags(int outFd) override;
     status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
 
     status_t initialize(sp<CameraProviderManager> /*manager*/,
-            const String8& /*monitorTags*/) override;
+            const std::string& /*monitorTags*/) override;
 
     status_t setRotateAndCropOverride(uint8_t rotateAndCrop) override;
 
@@ -117,7 +117,7 @@
     void notifyPrepared(int streamId) override;
     void notifyRequestQueueEmpty() override;
     void notifyRepeatingRequestError(long lastFrameNumber) override;
-    status_t injectCamera(const String8& injectedCamId,
+    status_t injectCamera(const std::string& injectedCamId,
             sp<CameraProviderManager> manager) override;
     status_t stopInjection() override;
 
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 4ed1c28..8f53458 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -46,7 +46,7 @@
 
 status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-        camera_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
+        camera_stream_rotation_t rotation, int * id, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> * surfaceIds,
         int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index a551d11..1b7fc6e 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -43,7 +43,7 @@
 
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
             int streamSetId, bool isShared, bool isMultiResolution, int32_t colorSpace,
@@ -57,7 +57,7 @@
     // Create and register all internal camera streams.
     virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
             int streamSetId, bool isShared, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 737c2b5..1bd0b85 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -20,6 +20,7 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
+#include <camera/StringUtils.h>
 
 #include "api1/client2/JpegProcessor.h"
 #include "common/CameraProviderManager.h"
@@ -495,17 +496,17 @@
     status_t err;
     int format;
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
-        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
+        std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
                 err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return false;
     }
 
     int dataspace;
     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
-        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
+        std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                 err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return false;
     }
 
@@ -578,7 +579,7 @@
 
 status_t DepthCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
-        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
         int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index fbe99dd..f797f9c 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -50,7 +50,7 @@
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
             int streamSetId, bool isShared, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 294a908..68e9ad4 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -29,6 +29,7 @@
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 
 #include <mediadrm/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
@@ -98,17 +99,17 @@
     status_t err;
     int format;
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
-        String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
+        std::string msg = fmt::sprintf("Failed to query Surface format: %s (%d)", strerror(-err),
                 err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return false;
     }
 
     int dataspace;
     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
-        String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
+        std::string msg = fmt::sprintf("Failed to query Surface dataspace: %s (%d)", strerror(-err),
                 err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return false;
     }
 
@@ -117,7 +118,7 @@
 
 status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
-        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
         int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 602a247..b539cdd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -45,7 +45,7 @@
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, int32_t colorSpace,
             int64_t dynamicProfile, int64_t streamUseCase, bool useReadoutTimestamp) override;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index 6588470..988446b 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -548,7 +548,7 @@
 
 status_t JpegRCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
         bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
-        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+        camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds,
         int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 3dfed30..016d57c 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -47,7 +47,7 @@
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
-            camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            camera_stream_rotation_t rotation, int *id, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds,
             int streamSetId, bool isShared, int32_t colorSpace,
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6e10f30..a54ba9b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -28,6 +28,7 @@
 #include <gui/Surface.h>
 
 #include <camera/CameraSessionStats.h>
+#include <camera/StringUtils.h>
 
 #include "common/Camera2ClientBase.h"
 
@@ -49,10 +50,10 @@
         const sp<CameraService>& cameraService,
         const sp<TCamCallbacks>& remoteCallback,
         std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-        const String16& clientPackageName,
+        const std::string& clientPackageName,
         bool systemNativeClient,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraId,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraId,
         int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
@@ -69,8 +70,8 @@
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
 {
-    ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
-            String8(clientPackageName).string(), clientPid, clientUid);
+    ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.c_str(),
+            clientPackageName.c_str(), clientPid, clientUid);
 
     mInitialClientPid = clientPid;
     mOverrideForPerfClass = overrideForPerfClass;
@@ -91,21 +92,21 @@
 
 template <typename TClientBase>
 status_t Camera2ClientBase<TClientBase>::initialize(sp<CameraProviderManager> manager,
-        const String8& monitorTags) {
+        const std::string& monitorTags) {
     return initializeImpl(manager, monitorTags);
 }
 
 template <typename TClientBase>
 template <typename TProviderPtr>
 status_t Camera2ClientBase<TClientBase>::initializeImpl(TProviderPtr providerPtr,
-        const String8& monitorTags) {
+        const std::string& monitorTags) {
     ATRACE_CALL();
     ALOGV("%s: Initializing client for camera %s", __FUNCTION__,
-          TClientBase::mCameraIdStr.string());
+          TClientBase::mCameraIdStr.c_str());
     status_t res;
 
     IPCTransport providerTransport = IPCTransport::INVALID;
-    res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr.string(),
+    res = providerPtr->getCameraIdIPCTransport(TClientBase::mCameraIdStr,
             &providerTransport);
     if (res != OK) {
         return res;
@@ -125,19 +126,19 @@
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
-                    TClientBase::mCameraIdStr.string());
+                    TClientBase::mCameraIdStr.c_str());
             return NO_INIT;
     }
     if (mDevice == NULL) {
         ALOGE("%s: Camera %s: No device connected",
-                __FUNCTION__, TClientBase::mCameraIdStr.string());
+                __FUNCTION__, TClientBase::mCameraIdStr.c_str());
         return NO_INIT;
     }
 
     res = mDevice->initialize(providerPtr, monitorTags);
     if (res != OK) {
         ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
-                __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
+                __FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
         return res;
     }
 
@@ -152,7 +153,7 @@
     res = mDevice->setNotifyCallback(weakThis);
     if (res != OK) {
         ALOGE("%s: Camera %s: Unable to set notify callback: %s (%d)",
-                __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
+                __FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
         return res;
     }
 
@@ -183,30 +184,30 @@
     }
 
     ALOGI("%s: Client object's dtor for Camera Id %s completed. Client was: %s (PID %d, UID %u)",
-            __FUNCTION__, TClientBase::mCameraIdStr.string(),
-            String8(TClientBase::mClientPackageName).string(),
+            __FUNCTION__, TClientBase::mCameraIdStr.c_str(),
+            TClientBase::mClientPackageName.c_str(),
             mInitialClientPid, TClientBase::mClientUid);
 }
 
 template <typename TClientBase>
 status_t Camera2ClientBase<TClientBase>::dumpClient(int fd,
                                               const Vector<String16>& args) {
-    String8 result;
-    result.appendFormat("Camera2ClientBase[%s] (%p) PID: %d, dump:\n",
-            TClientBase::mCameraIdStr.string(),
+    std::string result;
+    result += fmt::sprintf("Camera2ClientBase[%s] (%p) PID: %d, dump:\n",
+            TClientBase::mCameraIdStr.c_str(),
             (TClientBase::getRemoteCallback() != NULL ?
-                    IInterface::asBinder(TClientBase::getRemoteCallback()).get() : NULL),
+                    (void *)IInterface::asBinder(TClientBase::getRemoteCallback()).get() : NULL),
             TClientBase::mClientPid);
-    result.append("  State: ");
+    result += "  State: ";
 
-    write(fd, result.string(), result.size());
+    write(fd, result.c_str(), result.size());
     // TODO: print dynamic/request section from most recent requests
 
     return dumpDevice(fd, args);
 }
 
 template <typename TClientBase>
-status_t Camera2ClientBase<TClientBase>::startWatchingTags(const String8 &tags, int out) {
+status_t Camera2ClientBase<TClientBase>::startWatchingTags(const std::string &tags, int out) {
   sp<CameraDeviceBase> device = mDevice;
   if (!device) {
     dprintf(out, "  Device is detached");
@@ -241,23 +242,23 @@
 status_t Camera2ClientBase<TClientBase>::dumpDevice(
                                                 int fd,
                                                 const Vector<String16>& args) {
-    String8 result;
+    std::string result;
 
     result = "  Device dump:\n";
-    write(fd, result.string(), result.size());
+    write(fd, result.c_str(), result.size());
 
     sp<CameraDeviceBase> device = mDevice;
     if (!device.get()) {
         result = "  *** Device is detached\n";
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         return NO_ERROR;
     }
 
     status_t res = device->dump(fd, args);
     if (res != OK) {
-        result = String8::format("   Error dumping device: %s (%d)",
+        result = fmt::sprintf("   Error dumping device: %s (%d)",
                 strerror(-res), res);
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
     }
 
     return NO_ERROR;
@@ -284,23 +285,23 @@
 template <typename TClientBase>
 binder::Status Camera2ClientBase<TClientBase>::disconnectImpl() {
     ATRACE_CALL();
-    ALOGD("Camera %s: start to disconnect", TClientBase::mCameraIdStr.string());
+    ALOGD("Camera %s: start to disconnect", TClientBase::mCameraIdStr.c_str());
     Mutex::Autolock icl(mBinderSerializationLock);
 
-    ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.string());
+    ALOGD("Camera %s: serializationLock acquired", TClientBase::mCameraIdStr.c_str());
     binder::Status res = binder::Status::ok();
     // Allow both client and the media server to disconnect at all times
     int callingPid = CameraThreadState::getCallingPid();
     if (callingPid != TClientBase::mClientPid &&
         callingPid != TClientBase::mServicePid) return res;
 
-    ALOGD("Camera %s: Shutting down", TClientBase::mCameraIdStr.string());
+    ALOGD("Camera %s: Shutting down", TClientBase::mCameraIdStr.c_str());
 
     // Before detaching the device, cache the info from current open session.
     // The disconnected check avoids duplication of info and also prevents
     // deadlock while acquiring service lock in cacheDump.
     if (!TClientBase::mDisconnected) {
-        ALOGD("Camera %s: start to cacheDump", TClientBase::mCameraIdStr.string());
+        ALOGD("Camera %s: start to cacheDump", TClientBase::mCameraIdStr.c_str());
         Camera2ClientBase::getCameraService()->cacheDump();
     }
 
@@ -308,7 +309,7 @@
 
     CameraService::BasicClient::disconnect();
 
-    ALOGV("Camera %s: Shut down complete", TClientBase::mCameraIdStr.string());
+    ALOGV("Camera %s: Shut down complete", TClientBase::mCameraIdStr.c_str());
 
     return res;
 }
@@ -318,7 +319,7 @@
     if (mDevice == 0) return;
     mDevice->disconnect();
 
-    ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.string());
+    ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.c_str());
 }
 
 template <typename TClientBase>
@@ -334,7 +335,7 @@
         ALOGE("%s: Camera %s: Connection attempt from pid %d; "
                 "current locked to pid %d",
                 __FUNCTION__,
-                TClientBase::mCameraIdStr.string(),
+                TClientBase::mCameraIdStr.c_str(),
                 CameraThreadState::getCallingPid(),
                 TClientBase::mClientPid);
         return BAD_VALUE;
@@ -365,8 +366,7 @@
         return;
     }
 
-    String8 physicalId8(physicalId.c_str());
-    auto physicalCameraMetadata = mDevice->infoPhysical(physicalId8);
+    auto physicalCameraMetadata = mDevice->infoPhysical(physicalId);
     auto orientationEntry = physicalCameraMetadata.find(ANDROID_SENSOR_ORIENTATION);
 
     if (orientationEntry.count == 1) {
@@ -387,7 +387,7 @@
         status_t res = TClientBase::startCameraStreamingOps();
         if (res != OK) {
             ALOGE("%s: Camera %s: Error starting camera streaming ops: %d", __FUNCTION__,
-                    TClientBase::mCameraIdStr.string(), res);
+                    TClientBase::mCameraIdStr.c_str(), res);
             return res;
         }
         mCameraServiceProxyWrapper->logActive(TClientBase::mCameraIdStr, maxPreviewFps);
@@ -407,7 +407,7 @@
         status_t res = TClientBase::finishCameraStreamingOps();
         if (res != OK) {
             ALOGE("%s: Camera %s: Error finishing streaming ops: %d", __FUNCTION__,
-                    TClientBase::mCameraIdStr.string(), res);
+                    TClientBase::mCameraIdStr.c_str(), res);
         }
         mCameraServiceProxyWrapper->logIdle(TClientBase::mCameraIdStr,
                 requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
@@ -522,7 +522,7 @@
 }
 
 template <typename TClientBase>
-status_t Camera2ClientBase<TClientBase>::injectCamera(const String8& injectedCamId,
+status_t Camera2ClientBase<TClientBase>::injectCamera(const std::string& injectedCamId,
         sp<CameraProviderManager> manager) {
     return mDevice->injectCamera(injectedCamId, manager);
 }
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 5cf3033..30c763d 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -50,10 +50,10 @@
     Camera2ClientBase(const sp<CameraService>& cameraService,
                       const sp<TCamCallbacks>& remoteCallback,
                       std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
-                      const String16& clientPackageName,
+                      const std::string& clientPackageName,
                       bool systemNativeClient,
-                      const std::optional<String16>& clientFeatureId,
-                      const String8& cameraId,
+                      const std::optional<std::string>& clientFeatureId,
+                      const std::string& cameraId,
                       int api1CameraId,
                       int cameraFacing,
                       int sensorOrientation,
@@ -65,11 +65,12 @@
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
-    virtual status_t      initialize(sp<CameraProviderManager> manager, const String8& monitorTags);
-    virtual status_t      dumpClient(int fd, const Vector<String16>& args);
-    virtual status_t      startWatchingTags(const String8 &tags, int out);
-    virtual status_t      stopWatchingTags(int out);
-    virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out);
+    virtual status_t      initialize(sp<CameraProviderManager> manager,
+            const std::string& monitorTags) override;
+    virtual status_t      dumpClient(int fd, const Vector<String16>& args) override;
+    virtual status_t      startWatchingTags(const std::string &tags, int out) override;
+    virtual status_t      stopWatchingTags(int out) override;
+    virtual status_t      dumpWatchedEventsToVector(std::vector<std::string> &out) override;
 
     /**
      * NotificationListener implementation
@@ -130,7 +131,7 @@
         mutable Mutex mRemoteCallbackLock;
     } mSharedCameraCallbacks;
 
-    status_t      injectCamera(const String8& injectedCamId,
+    status_t      injectCamera(const std::string& injectedCamId,
                                sp<CameraProviderManager> manager) override;
     status_t      stopInjection() override;
 
@@ -181,7 +182,7 @@
 
 private:
     template<typename TProviderPtr>
-    status_t              initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
+    status_t              initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags);
 
     binder::Status disconnectImpl();
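Beyond the parameter types, the header hunk above adds the override specifier to the virtuals whose signatures changed. That is what keeps a String8-to-std::string migration honest: a derived class that still uses the old type fails to compile instead of silently declaring a new overload. A small self-contained illustration; the class and type names below are stand-ins, not the real camera classes:

    #include <string>

    struct OldString {};  // stand-in for android::String8

    struct Base {
        // Migrated base signature:
        virtual int startWatchingTags(const std::string& tags, int out) {
            (void)tags;
            return out;
        }
        virtual ~Base() = default;
    };

    struct Derived : Base {
        // With 'override', keeping the old parameter type here would be a
        // compile error rather than an accidental, never-called overload:
        //   int startWatchingTags(const OldString& tags, int out) override;
        int startWatchingTags(const std::string& tags, int out) override {
            return Base::startWatchingTags(tags, out);
        }
    };

    int main() {
        Derived d;
        return d.startWatchingTags("3a", 0);
    }
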
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 919108d..017da0f 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -20,7 +20,6 @@
 #include <list>
 
 #include <utils/RefBase.h>
-#include <utils/String8.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
 #include <utils/KeyedVector.h>
@@ -99,11 +98,12 @@
      */
     virtual metadata_vendor_id_t getVendorTagId() const = 0;
 
-    virtual status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) = 0;
+    virtual status_t initialize(sp<CameraProviderManager> manager,
+            const std::string& monitorTags) = 0;
     virtual status_t disconnect() = 0;
 
     virtual status_t dump(int fd, const Vector<String16> &args) = 0;
-    virtual status_t startWatchingTags(const String8 &tags) = 0;
+    virtual status_t startWatchingTags(const std::string &tags) = 0;
     virtual status_t stopWatchingTags() = 0;
     virtual status_t dumpWatchedEventsToVector(std::vector<std::string> &out) = 0;
 
@@ -111,7 +111,7 @@
      * The physical camera device's static characteristics metadata buffer, or
      * the logical camera's static characteristics if physical id is empty.
      */
-    virtual const CameraMetadata& infoPhysical(const String8& physicalId) const = 0;
+    virtual const CameraMetadata& infoPhysical(const std::string& physicalId) const = 0;
 
     virtual bool isCompositeJpegRDisabled() const { return false; };
 
@@ -188,7 +188,7 @@
     virtual status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t>  &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
@@ -212,7 +212,7 @@
     virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
@@ -510,7 +510,7 @@
      * The injection camera session to replace the internal camera
      * session.
      */
-    virtual status_t injectCamera(const String8& injectedCamId,
+    virtual status_t injectCamera(const std::string& injectedCamId,
             sp<CameraProviderManager> manager) = 0;
 
     /**
diff --git a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
index 63abcf0..976c47c 100644
--- a/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
+++ b/services/camera/libcameraservice/common/CameraOfflineSessionBase.h
@@ -20,7 +20,6 @@
 #include <vector>
 
 #include <utils/RefBase.h>
-#include <utils/String8.h>
 #include <utils/Timers.h>
 
 #include "camera/CaptureResult.h"
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 1a6e341..23051ef 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -43,6 +43,7 @@
 #include <hwbinder/IPCThreadState.h>
 #include <utils/Trace.h>
 #include <ui/PublicFormat.h>
+#include <camera/StringUtils.h>
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
@@ -143,7 +144,7 @@
             String16(aidlHalServiceDescriptor));
     for (const auto &aidlInstance : aidlProviders) {
         std::string aidlServiceName =
-                getFullAidlProviderName(std::string(String8(aidlInstance).c_str()));
+                getFullAidlProviderName(toStdString(aidlInstance));
         auto res = sm->registerForNotifications(String16(aidlServiceName.c_str()), this);
         if (res != OK) {
             ALOGE("%s Unable to register for notifications with AIDL service manager",
@@ -774,14 +775,14 @@
         primaryMap = &mCameraProviderByCameraId;
         alternateMap = &mTorchProviderByCameraId;
     }
-    auto id = cameraId.c_str();
-    (*primaryMap)[id] = provider;
-    auto search = alternateMap->find(id);
+
+    (*primaryMap)[cameraId] = provider;
+    auto search = alternateMap->find(cameraId);
     if (search != alternateMap->end()) {
         ALOGW("%s: Camera device %s is using both torch mode and camera mode simultaneously. "
-                "That should not be possible", __FUNCTION__, id);
+                "That should not be possible", __FUNCTION__, cameraId.c_str());
     }
-    ALOGV("%s: Camera device %s connected", __FUNCTION__, id);
+    ALOGV("%s: Camera device %s connected", __FUNCTION__, cameraId.c_str());
 }
 
 void CameraProviderManager::removeRef(DeviceMode usageType, const std::string &cameraId) {
@@ -796,7 +797,7 @@
         providerMap = &mCameraProviderByCameraId;
     }
     std::lock_guard<std::mutex> lock(mProviderInterfaceMapLock);
-    auto search = providerMap->find(cameraId.c_str());
+    auto search = providerMap->find(cameraId);
     if (search != providerMap->end()) {
         // Drop the reference to this ICameraProvider. This is safe to do immediately (without an
         // added delay) because hwservicemanager guarantees to hold the reference for at least five
@@ -805,7 +806,7 @@
         // restart it. An example when this could happen is switching from a front-facing to a
         // rear-facing camera. If the HAL were to exit during the camera switch, the camera could
         // appear janky to the user.
-        providerMap->erase(cameraId.c_str());
+        providerMap->erase(cameraId);
         IPCThreadState::self()->flushCommands();
     } else {
         ALOGE("%s: Asked to remove reference for camera %s, but no reference to it was found. This "
@@ -823,7 +824,7 @@
     {
         std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
-        res = addAidlProviderLocked(String8(name).c_str());
+        res = addAidlProviderLocked(toStdString(name));
     }
 
     sp<StatusListener> listener = getStatusListener();
@@ -2035,14 +2036,14 @@
 status_t CameraProviderManager::removeProvider(const std::string& provider) {
     std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
     std::unique_lock<std::mutex> lock(mInterfaceMutex);
-    std::vector<String8> removedDeviceIds;
+    std::vector<std::string> removedDeviceIds;
     status_t res = NAME_NOT_FOUND;
     std::string removedProviderName;
     for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
         if ((*it)->mProviderInstance == provider) {
             removedDeviceIds.reserve((*it)->mDevices.size());
             for (auto& deviceInfo : (*it)->mDevices) {
-                removedDeviceIds.push_back(String8(deviceInfo->mId.c_str()));
+                removedDeviceIds.push_back(deviceInfo->mId);
             }
             removedProviderName = (*it)->mProviderName;
             mProviders.erase(it);
@@ -2182,7 +2183,7 @@
     return OK;
 }
 
-void CameraProviderManager::ProviderInfo::removeDevice(std::string id) {
+void CameraProviderManager::ProviderInfo::removeDevice(const std::string &id) {
     for (auto it = mDevices.begin(); it != mDevices.end(); it++) {
         if ((*it)->mId == id) {
             mUniqueCameraIds.erase(id);
@@ -2222,8 +2223,7 @@
             ALOGV("%s: notify device not_present: %s",
                   __FUNCTION__,
                   deviceName.c_str());
-            listener->onDeviceStatusChanged(String8(id.c_str()),
-                                            CameraDeviceStatus::NOT_PRESENT);
+            listener->onDeviceStatusChanged(id, CameraDeviceStatus::NOT_PRESENT);
             mLock.lock();
         }
     }
@@ -2324,8 +2324,7 @@
     CameraDeviceStatus internalNewStatus = newStatus;
     if (!mInitialized) {
         mCachedStatus.emplace_back(false /*isPhysicalCameraStatus*/,
-                cameraDeviceName.c_str(), std::string().c_str(),
-                internalNewStatus);
+                cameraDeviceName, std::string(), internalNewStatus);
         return;
     }
 
@@ -2339,7 +2338,7 @@
 
     // Call without lock held to allow reentrancy into provider manager
     if (listener != nullptr) {
-        listener->onDeviceStatusChanged(String8(id.c_str()), internalNewStatus);
+        listener->onDeviceStatusChanged(id, internalNewStatus);
     }
 }
 
@@ -2415,8 +2414,7 @@
     }
     // Call without lock held to allow reentrancy into provider manager
     if (listener != nullptr) {
-        listener->onDeviceStatusChanged(String8(id.c_str()),
-                String8(physicalId.c_str()), newStatus);
+        listener->onDeviceStatusChanged(id, physicalId, newStatus);
     }
     return;
 }
@@ -2467,7 +2465,7 @@
     }
 
     *id = cameraId;
-    *physicalId = physicalCameraDeviceName.c_str();
+    *physicalId = physicalCameraDeviceName;
     return OK;
 }
 
@@ -2511,7 +2509,7 @@
     // findDeviceInfo, which should be holding mLock while iterating through
     // each provider's devices).
     if (listener != nullptr) {
-        listener->onTorchStatusChanged(String8(id.c_str()), newStatus, systemCameraKind);
+        listener->onTorchStatusChanged(id, newStatus, systemCameraKind);
     }
     return;
 }
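Where the provider manager still crosses a String16-based Binder surface (service names from the service manager, dump arguments), the change converts at that boundary with the camera/StringUtils.h helpers instead of carrying String16 deeper into the code. A hedged sketch of that boundary conversion; the helper signatures are assumed from their usage in this patch, and the snippet builds only inside the AOSP camera service tree:

    #include <string>

    #include <utils/String16.h>
    #include <camera/StringUtils.h>

    // Keep std::string internally; convert only at the String16 boundary.
    static std::string fromBinderName(const android::String16& name16) {
        return android::toStdString(name16);   // e.g. a service name in onServiceRegistration()
    }

    static android::String16 toBinderName(const std::string& name) {
        return android::toString16(name);      // e.g. when registering for notifications
    }
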
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index e6e4619..a2ec576 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -179,15 +179,15 @@
     struct StatusListener : virtual public RefBase {
         ~StatusListener() {}
 
-        virtual void onDeviceStatusChanged(const String8 &cameraId,
+        virtual void onDeviceStatusChanged(const std::string &cameraId,
                 CameraDeviceStatus newStatus) = 0;
-        virtual void onDeviceStatusChanged(const String8 &cameraId,
-                const String8 &physicalCameraId,
+        virtual void onDeviceStatusChanged(const std::string &cameraId,
+                const std::string &physicalCameraId,
                 CameraDeviceStatus newStatus) = 0;
-        virtual void onTorchStatusChanged(const String8 &cameraId,
+        virtual void onTorchStatusChanged(const std::string &cameraId,
                 TorchModeStatus newStatus,
                 SystemCameraKind kind) = 0;
-        virtual void onTorchStatusChanged(const String8 &cameraId,
+        virtual void onTorchStatusChanged(const std::string &cameraId,
                 TorchModeStatus newStatus) = 0;
         virtual void onNewProviderRegistered() = 0;
     };
@@ -784,7 +784,7 @@
         void torchModeStatusChangeInternal(const std::string& cameraDeviceName,
                 TorchModeStatus newStatus);
 
-        void removeDevice(std::string id);
+        void removeDevice(const std::string &id);
 
     };
 
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index e259379..2322def 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -21,6 +21,7 @@
 #include <map>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 
 #include "common/FrameProducer.h"
 #include "common/FrameProcessorBase.h"
@@ -86,8 +87,8 @@
 }
 
 void FrameProcessorBase::dump(int fd, const Vector<String16>& /*args*/) {
-    String8 result("    Latest received frame:\n");
-    write(fd, result.string(), result.size());
+    std::string result("    Latest received frame:\n");
+    write(fd, result.c_str(), result.size());
 
     CameraMetadata lastFrame;
     std::map<std::string, CameraMetadata> lastPhysicalFrames;
@@ -97,16 +98,16 @@
         lastFrame = CameraMetadata(mLastFrame);
 
         for (const auto& physicalFrame : mLastPhysicalFrames) {
-            lastPhysicalFrames.emplace(String8(physicalFrame.mPhysicalCameraId),
+            lastPhysicalFrames.emplace(physicalFrame.mPhysicalCameraId,
                     physicalFrame.mPhysicalCameraMetadata);
         }
     }
     lastFrame.dump(fd, /*verbosity*/2, /*indentation*/6);
 
     for (const auto& physicalFrame : lastPhysicalFrames) {
-        result = String8::format("   Latest received frame for physical camera %s:\n",
+        result = fmt::sprintf("   Latest received frame for physical camera %s:\n",
                 physicalFrame.first.c_str());
-        write(fd, result.string(), result.size());
+        write(fd, result.c_str(), result.size());
         CameraMetadata lastPhysicalMetadata = CameraMetadata(physicalFrame.second);
         lastPhysicalMetadata.sort();
         lastPhysicalMetadata.dump(fd, /*verbosity*/2, /*indentation*/6);
@@ -138,7 +139,7 @@
     ATRACE_CALL();
     CaptureResult result;
 
-    ALOGV("%s: Camera %s: Process new frames", __FUNCTION__, device->getId().string());
+    ALOGV("%s: Camera %s: Process new frames", __FUNCTION__, device->getId().c_str());
 
     while ( (res = device->getNextResult(&result)) == OK) {
 
@@ -149,7 +150,7 @@
         entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
         if (entry.count == 0) {
             ALOGE("%s: Camera %s: Error reading frame number",
-                    __FUNCTION__, device->getId().string());
+                    __FUNCTION__, device->getId().c_str());
             break;
         }
         ATRACE_INT("cam2_frame", entry.data.i32[0]);
@@ -167,7 +168,7 @@
     }
     if (res != NOT_ENOUGH_DATA) {
         ALOGE("%s: Camera %s: Error getting next frame: %s (%d)",
-                __FUNCTION__, device->getId().string(), strerror(-res), res);
+                __FUNCTION__, device->getId().c_str(), strerror(-res), res);
         return;
     }
 
@@ -177,7 +178,7 @@
 bool FrameProcessorBase::processSingleFrame(CaptureResult &result,
                                             const sp<FrameProducer> &device) {
     ALOGV("%s: Camera %s: Process single frame (is empty? %d)",
-            __FUNCTION__, device->getId().string(), result.mMetadata.isEmpty());
+            __FUNCTION__, device->getId().c_str(), result.mMetadata.isEmpty());
     return processListeners(result, device) == OK;
 }
 
@@ -197,7 +198,7 @@
     // include CaptureResultExtras.
     entry = result.mMetadata.find(ANDROID_REQUEST_ID);
     if (entry.count == 0) {
-        ALOGE("%s: Camera %s: Error reading frame id", __FUNCTION__, device->getId().string());
+        ALOGE("%s: Camera %s: Error reading frame id", __FUNCTION__, device->getId().c_str());
         return BAD_VALUE;
     }
     int32_t requestId = entry.data.i32[0];
@@ -223,7 +224,7 @@
         }
     }
     ALOGV("%s: Camera %s: Got %zu range listeners out of %zu", __FUNCTION__,
-          device->getId().string(), listeners.size(), mRangeListeners.size());
+          device->getId().c_str(), listeners.size(), mRangeListeners.size());
 
     List<sp<FilteredListener> >::iterator item = listeners.begin();
     for (; item != listeners.end(); item++) {
diff --git a/services/camera/libcameraservice/common/FrameProducer.h b/services/camera/libcameraservice/common/FrameProducer.h
index a14b3d6..dd4df7d 100644
--- a/services/camera/libcameraservice/common/FrameProducer.h
+++ b/services/camera/libcameraservice/common/FrameProducer.h
@@ -18,7 +18,6 @@
 #define ANDROID_SERVERS_CAMERA_FRAMEPRODUCER_H
 
 #include <utils/RefBase.h>
-#include <utils/String8.h>
 #include <utils/Timers.h>
 
 #include "camera/CameraMetadata.h"
@@ -39,7 +38,7 @@
     /**
      * Retrieve the device camera ID
      */
-    virtual const String8& getId() const = 0;
+    virtual const std::string& getId() const = 0;
 
     /**
      * Wait for a new frame to be produced, with timeout in nanoseconds.
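FrameProducer::getId() now returns const std::string&, so implementations are expected to hand back a reference to a member that lives as long as the producer, and call sites (FrameProcessorBase above) borrow c_str() only for the duration of the log statement. A minimal stand-in showing that lifetime contract; the names are invented for illustration:

    #include <cstdio>
    #include <string>

    struct Producer {  // stand-in interface, not the real FrameProducer
        virtual const std::string& getId() const = 0;
        virtual ~Producer() = default;
    };

    struct FakeDevice : Producer {
        std::string mId{"0"};
        const std::string& getId() const override { return mId; }  // reference to a member
    };

    int main() {
        FakeDevice dev;
        // c_str() is only used transiently, inside the call, like the ALOGV sites above.
        std::printf("Camera %s: Process new frames\n", dev.getId().c_str());
        return 0;
    }
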
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index b18cbd4..9659db8 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -431,7 +431,7 @@
     for (const auto& combination : combs) {
         std::unordered_set<std::string> deviceIds;
         for (const auto &cameraDeviceId : combination.combination) {
-            deviceIds.insert(cameraDeviceId.c_str());
+            deviceIds.insert(cameraDeviceId);
         }
         mConcurrentCameraIdCombinations.push_back(std::move(deviceIds));
     }
@@ -607,8 +607,9 @@
             aidl::android::hardware::camera::device::CameraMetadata pChars;
             status = interface->getPhysicalCameraCharacteristics(id, &pChars);
             if (!status.isOk()) {
-                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
-                        __FUNCTION__, id.c_str(), id.c_str(), status.getMessage());
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for "
+                        "logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+                        status.getMessage());
                 return;
             }
             std::vector<uint8_t> &pMetadata = pChars.metadata;
@@ -735,7 +736,7 @@
     camera::device::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            String8(mId.c_str()), mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
+            mId, mCameraCharacteristics, mCompositeJpegRDisabled, getMetadata,
             mPhysicalIds, streamConfiguration, overrideForPerfClass, &earlyExit);
 
     if (!bRes.isOk()) {
@@ -789,9 +790,9 @@
             return res;
         }
         camera3::metadataGetter getMetadata =
-                [this](const String8 &id, bool overrideForPerfClass) {
+                [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
+                    mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
                                                    &physicalDeviceInfo,
                                                    /*overrideToPortrait*/false);
                     return physicalDeviceInfo;
@@ -801,7 +802,7 @@
         bStatus =
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraId.c_str()), deviceInfo,
+                    cameraId, deviceInfo,
                     mManager->isCompositeJpegRDisabledLocked(cameraId), getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, &shouldExit);
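The metadataGetter callback above now takes const std::string& rather than String8, so the provider code can pass camera ids straight through without wrapping them. A simplified stand-in for that callback shape; the payload type and alias below are placeholders, not the real camera3::metadataGetter or CameraMetadata:

    #include <functional>
    #include <map>
    #include <string>

    using Metadata = std::map<std::string, int>;  // placeholder for CameraMetadata
    using MetadataGetter =
            std::function<Metadata(const std::string& id, bool overrideForPerfClass)>;

    int main() {
        MetadataGetter getMetadata = [](const std::string& id, bool overrideForPerfClass) {
            Metadata info;
            info["idLength"] = static_cast<int>(id.size());
            info["perfClassOverride"] = overrideForPerfClass ? 1 : 0;
            return info;
        };
        // The id is forwarded as-is, with no String8(cameraId.c_str()) round-trip.
        Metadata info = getMetadata("0", /*overrideForPerfClass*/ false);
        return info.at("perfClassOverride");
    }
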
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 8ff5c3f..aeb0abc 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -533,7 +533,7 @@
                 for (auto& combination : cameraDeviceIdCombinations) {
                     std::unordered_set<std::string> deviceIds;
                     for (auto &cameraDeviceId : combination) {
-                        deviceIds.insert(cameraDeviceId.c_str());
+                        deviceIds.insert(cameraDeviceId);
                     }
                     mConcurrentCameraIdCombinations.push_back(std::move(deviceIds));
                 }
@@ -743,8 +743,9 @@
             });
 
             if (!ret.isOk()) {
-                ALOGE("%s: Transaction error getting physical camera %s characteristics for %s: %s",
-                        __FUNCTION__, id.c_str(), id.c_str(), ret.description().c_str());
+                ALOGE("%s: Transaction error getting physical camera %s characteristics for"
+                        " logical id %s: %s", __FUNCTION__, id.c_str(), mId.c_str(),
+                        ret.description().c_str());
                 return;
             }
             if (status != Status::OK) {
@@ -837,7 +838,7 @@
     hardware::camera::device::V3_7::StreamConfiguration configuration_3_7;
     bool earlyExit = false;
     auto bRes = SessionConfigurationUtils::convertToHALStreamCombination(configuration,
-            String8(mId.c_str()), mCameraCharacteristics, getMetadata, mPhysicalIds,
+            mId, mCameraCharacteristics, getMetadata, mPhysicalIds,
             configuration_3_7, overrideForPerfClass, &earlyExit);
 
     if (!bRes.isOk()) {
@@ -927,9 +928,9 @@
             return res;
         }
         camera3::metadataGetter getMetadata =
-                [this](const String8 &id, bool overrideForPerfClass) {
+                [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    mManager->getCameraCharacteristicsLocked(id.string(), overrideForPerfClass,
+                    mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
                             &physicalDeviceInfo, /*overrideToPortrait*/false);
                     return physicalDeviceInfo;
                 };
@@ -938,7 +939,7 @@
         bStatus =
             SessionConfigurationUtils::convertToHALStreamCombination(
                     cameraIdAndSessionConfig.mSessionConfiguration,
-                    String8(cameraId.c_str()), deviceInfo, getMetadata,
+                    cameraId, deviceInfo, getMetadata,
                     physicalCameraIds, streamConfiguration,
                     overrideForPerfClass, &shouldExit);
         if (!bStatus.isOk()) {
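In both the AIDL and HIDL paths above, device ids are now inserted into the std::unordered_set<std::string> directly; the old .c_str() round-trip through a raw pointer only added an extra length scan and serves no purpose once the ids are already strings. A tiny standalone illustration (the camera id is made up):

    #include <string>
    #include <unordered_set>

    int main() {
        std::unordered_set<std::string> deviceIds;
        std::string cameraDeviceId = "0";          // illustrative camera id
        // Before: deviceIds.insert(cameraDeviceId.c_str());  // re-measures and re-copies the chars
        deviceIds.insert(cameraDeviceId);          // copies the string directly
        return deviceIds.count("0") == 1 ? 0 : 1;
    }
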
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index 2ac38d5..c42e51a 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -18,10 +18,13 @@
 #define LOG_TAG "Camera3-BufferManager"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 
+#include <sstream>
+
 #include <gui/ISurfaceComposer.h>
 #include <private/gui/ComposerService.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 #include "utils/CameraTraces.h"
 #include "Camera3BufferManager.h"
 
@@ -454,34 +457,36 @@
 void Camera3BufferManager::dump(int fd, [[maybe_unused]] const Vector<String16>& args) const {
     Mutex::Autolock l(mLock);
 
-    String8 lines;
-    lines.appendFormat("      Total stream sets: %zu\n", mStreamSetMap.size());
+    std::ostringstream lines;
+    lines << fmt::sprintf("      Total stream sets: %zu\n", mStreamSetMap.size());
     for (size_t i = 0; i < mStreamSetMap.size(); i++) {
-        lines.appendFormat("        Stream set %d(%d) has below streams:\n",
+        lines << fmt::sprintf("        Stream set %d(%d) has below streams:\n",
                 mStreamSetMap.keyAt(i).id, mStreamSetMap.keyAt(i).isMultiRes);
         for (size_t j = 0; j < mStreamSetMap[i].streamInfoMap.size(); j++) {
-            lines.appendFormat("          Stream %d\n", mStreamSetMap[i].streamInfoMap[j].streamId);
+            lines << fmt::sprintf("          Stream %d\n",
+                    mStreamSetMap[i].streamInfoMap[j].streamId);
         }
-        lines.appendFormat("          Stream set max allowed buffer count: %zu\n",
+        lines << fmt::sprintf("          Stream set max allowed buffer count: %zu\n",
                 mStreamSetMap[i].maxAllowedBufferCount);
-        lines.appendFormat("          Stream set buffer count water mark: %zu\n",
+        lines << fmt::sprintf("          Stream set buffer count water mark: %zu\n",
                 mStreamSetMap[i].allocatedBufferWaterMark);
-        lines.appendFormat("          Handout buffer counts:\n");
+        lines << "          Handout buffer counts:\n";
         for (size_t m = 0; m < mStreamSetMap[i].handoutBufferCountMap.size(); m++) {
             int streamId = mStreamSetMap[i].handoutBufferCountMap.keyAt(m);
             size_t bufferCount = mStreamSetMap[i].handoutBufferCountMap.valueAt(m);
-            lines.appendFormat("            stream id: %d, buffer count: %zu.\n",
+            lines << fmt::sprintf("            stream id: %d, buffer count: %zu.\n",
                     streamId, bufferCount);
         }
-        lines.appendFormat("          Attached buffer counts:\n");
+        lines << "          Attached buffer counts:\n";
         for (size_t m = 0; m < mStreamSetMap[i].attachedBufferCountMap.size(); m++) {
             int streamId = mStreamSetMap[i].attachedBufferCountMap.keyAt(m);
             size_t bufferCount = mStreamSetMap[i].attachedBufferCountMap.valueAt(m);
-            lines.appendFormat("            stream id: %d, attached buffer count: %zu.\n",
+            lines << fmt::sprintf("            stream id: %d, attached buffer count: %zu.\n",
                     streamId, bufferCount);
         }
     }
-    write(fd, lines.string(), lines.size());
+    std::string linesStr = lines.str();
+    write(fd, linesStr.c_str(), linesStr.size());
 }
 
 bool Camera3BufferManager::checkIfStreamRegisteredLocked(int streamId,
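Camera3BufferManager::dump() above now accumulates its text in a std::ostringstream and writes the result to the dump fd once, instead of growing a String8 with appendFormat. A standalone sketch of that shape, assuming POSIX write() and the {fmt} library; the counters are made up:

    #include <sstream>
    #include <string>

    #include <unistd.h>

    #include <fmt/printf.h>

    static void dumpSketch(int fd) {
        std::ostringstream lines;
        lines << fmt::sprintf("      Total stream sets: %zu\n", static_cast<size_t>(2));
        lines << "          Handout buffer counts:\n";
        lines << fmt::sprintf("            stream id: %d, buffer count: %zu.\n",
                0, static_cast<size_t>(4));
        std::string linesStr = lines.str();  // str() already returns by value
        write(fd, linesStr.c_str(), linesStr.size());
    }

    int main() {
        dumpSketch(STDOUT_FILENO);
        return 0;
    }
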
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 71e49fd..597b9aa 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -26,10 +26,10 @@
 #endif
 
 // Convenience macro for transient errors
-#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
             ##__VA_ARGS__)
 
-#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
             ##__VA_ARGS__)
 
 // Convenience macros for transitioning to the error state
@@ -44,10 +44,12 @@
 
 #include <utility>
 
+#include <android-base/stringprintf.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <utils/Timers.h>
 #include <cutils/properties.h>
+#include <camera/StringUtils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
@@ -60,9 +62,9 @@
 #include "device3/Camera3InputStream.h"
 #include "device3/Camera3OutputStream.h"
 #include "device3/Camera3SharedOutputStream.h"
-#include "mediautils/SchedulingPolicyService.h"
 #include "utils/CameraThreadState.h"
 #include "utils/CameraTraces.h"
+#include "utils/SchedulingPolicyUtils.h"
 #include "utils/SessionConfigurationUtils.h"
 #include "utils/TraceHFR.h"
 
@@ -75,7 +77,8 @@
 namespace android {
 
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const String8 &id, bool overrideForPerfClass, bool overrideToPortrait, bool legacyClient):
+        const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+        bool legacyClient):
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mId(id),
         mLegacyClient(legacyClient),
@@ -107,17 +110,17 @@
         mActivePhysicalId("")
 {
     ATRACE_CALL();
-    ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.c_str());
 }
 
 Camera3Device::~Camera3Device()
 {
     ATRACE_CALL();
-    ALOGV("%s: Tearing down for camera id %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Tearing down for camera id %s", __FUNCTION__, mId.c_str());
     disconnectImpl();
 }
 
-const String8& Camera3Device::getId() const {
+const std::string& Camera3Device::getId() const {
     return mId;
 }
 
@@ -125,7 +128,7 @@
 
     /** Start up status tracker thread */
     mStatusTracker = new StatusTracker(this);
-    status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
+    status_t res = mStatusTracker->run((std::string("C3Dev-") + mId + "-Status").c_str());
     if (res != OK) {
         SET_ERR_L("Unable to start status tracking thread: %s (%d)",
                 strerror(-res), res);
@@ -188,7 +191,7 @@
             this, mStatusTracker, mInterface, sessionParamKeys,
             mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
             mSupportZoomOverride);
-    res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
+    res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
     if (res != OK) {
         SET_ERR_L("Unable to start request queue thread: %s (%d)",
                 strerror(-res), res);
@@ -225,23 +228,23 @@
 
     bool usePrecorrectArray = DistortionMapper::isDistortionSupported(mDeviceInfo);
     if (usePrecorrectArray) {
-        res = mDistortionMappers[mId.c_str()].setupStaticInfo(mDeviceInfo);
+        res = mDistortionMappers[mId].setupStaticInfo(mDeviceInfo);
         if (res != OK) {
             SET_ERR_L("Unable to read necessary calibration fields for distortion correction");
             return res;
         }
     }
 
-    mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
+    mZoomRatioMappers[mId] = ZoomRatioMapper(&mDeviceInfo,
             mSupportNativeZoomRatio, usePrecorrectArray);
 
     if (SessionConfigurationUtils::supportsUltraHighResolutionCapture(mDeviceInfo)) {
-        mUHRCropAndMeteringRegionMappers[mId.c_str()] =
+        mUHRCropAndMeteringRegionMappers[mId] =
                 UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
     }
 
     if (RotateAndCropMapper::isNeeded(&mDeviceInfo)) {
-        mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
+        mRotateAndCropMappers.emplace(mId, &mDeviceInfo);
     }
 
     // Hidl/AidlCamera3DeviceInjectionMethods
@@ -428,7 +431,7 @@
                     /*isUltraHighResolution*/true);
     if (maxDefaultJpegResolution.width == 0) {
         ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
-                __FUNCTION__, mId.string());
+                __FUNCTION__, mId.c_str());
         return BAD_VALUE;
     }
     bool useMaxSensorPixelModeThreshold = false;
@@ -443,7 +446,7 @@
     camera_metadata_ro_entry jpegBufMaxSize = info.find(ANDROID_JPEG_MAX_SIZE);
     if (jpegBufMaxSize.count == 0) {
         ALOGE("%s: Camera %s: Can't find maximum JPEG size in static metadata!", __FUNCTION__,
-                mId.string());
+                mId.c_str());
         return BAD_VALUE;
     }
     maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
@@ -475,7 +478,7 @@
     camera_metadata_ro_entry maxPointCount = info.find(ANDROID_DEPTH_MAX_DEPTH_SAMPLES);
     if (maxPointCount.count == 0) {
         ALOGE("%s: Camera %s: Can't find maximum depth point cloud size in static metadata!",
-                __FUNCTION__, mId.string());
+                __FUNCTION__, mId.c_str());
         return BAD_VALUE;
     }
     ssize_t maxBytesForPointCloud = sizeof(android_depth_points) +
@@ -497,7 +500,7 @@
     size_t count = rawOpaqueSizes.count;
     if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
         ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
-                __FUNCTION__, mId.string(), count);
+                __FUNCTION__, mId.c_str(), count);
         return BAD_VALUE;
     }
 
@@ -509,7 +512,7 @@
     }
 
     ALOGE("%s: Camera %s: cannot find size for %dx%d opaque RAW image!",
-            __FUNCTION__, mId.string(), width, height);
+            __FUNCTION__, mId.c_str(), width, height);
     return BAD_VALUE;
 }
 
@@ -523,10 +526,10 @@
 
     ALOGW_IF(!gotInterfaceLock,
             "Camera %s: %s: Unable to lock interface lock, proceeding anyway",
-            mId.string(), __FUNCTION__);
+            mId.c_str(), __FUNCTION__);
     ALOGW_IF(!gotLock,
             "Camera %s: %s: Unable to lock main lock, proceeding anyway",
-            mId.string(), __FUNCTION__);
+            mId.c_str(), __FUNCTION__);
 
     bool dumpTemplates = false;
 
@@ -536,9 +539,9 @@
         if (args[i] == templatesOption) {
             dumpTemplates = true;
         }
-        if (args[i] == TagMonitor::kMonitorOption) {
+        if (args[i] == toString16(TagMonitor::kMonitorOption)) {
             if (i + 1 < n) {
-                String8 monitorTags = String8(args[i + 1]);
+                std::string monitorTags = toStdString(args[i + 1]);
                 if (monitorTags == "off") {
                     mTagMonitor.disableMonitoring();
                 } else {
@@ -550,7 +553,7 @@
         }
     }
 
-    String8 lines;
+    std::string lines;
 
     const char *status =
             mStatus == STATUS_ERROR         ? "ERROR" :
@@ -560,42 +563,42 @@
             mStatus == STATUS_ACTIVE        ? "ACTIVE" :
             "Unknown";
 
-    lines.appendFormat("    Device status: %s\n", status);
+    lines += fmt::sprintf("    Device status: %s\n", status);
     if (mStatus == STATUS_ERROR) {
-        lines.appendFormat("    Error cause: %s\n", mErrorCause.string());
+        lines += fmt::sprintf("    Error cause: %s\n", mErrorCause.c_str());
     }
-    lines.appendFormat("    Stream configuration:\n");
+    lines += "    Stream configuration:\n";
     const char *mode =
             mOperatingMode == CAMERA_STREAM_CONFIGURATION_NORMAL_MODE ? "NORMAL" :
             mOperatingMode == CAMERA_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE ?
                     "CONSTRAINED_HIGH_SPEED" : "CUSTOM";
-    lines.appendFormat("    Operation mode: %s (%d) \n", mode, mOperatingMode);
+    lines += fmt::sprintf("    Operation mode: %s (%d) \n", mode, mOperatingMode);
 
     if (mInputStream != NULL) {
-        write(fd, lines.string(), lines.size());
+        write(fd, lines.c_str(), lines.size());
         mInputStream->dump(fd, args);
     } else {
-        lines.appendFormat("      No input stream.\n");
-        write(fd, lines.string(), lines.size());
+        lines += "      No input stream.\n";
+        write(fd, lines.c_str(), lines.size());
     }
     for (size_t i = 0; i < mOutputStreams.size(); i++) {
         mOutputStreams[i]->dump(fd,args);
     }
 
     if (mBufferManager != NULL) {
-        lines = String8("    Camera3 Buffer Manager:\n");
-        write(fd, lines.string(), lines.size());
+        lines = "    Camera3 Buffer Manager:\n";
+        write(fd, lines.c_str(), lines.size());
         mBufferManager->dump(fd, args);
     }
 
-    lines = String8("    In-flight requests:\n");
+    lines = "    In-flight requests:\n";
     if (mInFlightLock.try_lock()) {
         if (mInFlightMap.size() == 0) {
-            lines.append("      None\n");
+            lines += "      None\n";
         } else {
             for (size_t i = 0; i < mInFlightMap.size(); i++) {
                 InFlightRequest r = mInFlightMap.valueAt(i);
-                lines.appendFormat("      Frame %d |  Timestamp: %" PRId64 ", metadata"
+                lines += fmt::sprintf("      Frame %d |  Timestamp: %" PRId64 ", metadata"
                         " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
                         r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
                         r.numBuffersLeft);
@@ -603,9 +606,9 @@
         }
         mInFlightLock.unlock();
     } else {
-        lines.append("      Failed to acquire In-flight lock!\n");
+        lines += "      Failed to acquire In-flight lock!\n";
     }
-    write(fd, lines.string(), lines.size());
+    write(fd, lines.c_str(), lines.size());
 
     if (mRequestThread != NULL) {
         mRequestThread->dumpCaptureRequestLatency(fd,
@@ -613,8 +616,8 @@
     }
 
     {
-        lines = String8("    Last request sent:\n");
-        write(fd, lines.string(), lines.size());
+        lines = "    Last request sent:\n";
+        write(fd, lines.c_str(), lines.size());
 
         CameraMetadata lastRequest = getLatestRequestLocked();
         lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
@@ -634,12 +637,12 @@
             camera_metadata_t *templateRequest = nullptr;
             mInterface->constructDefaultRequestSettings(
                     (camera_request_template_t) i, &templateRequest);
-            lines = String8::format("    HAL Request %s:\n", templateNames[i-1]);
+            lines = fmt::sprintf("    HAL Request %s:\n", templateNames[i-1]);
             if (templateRequest == nullptr) {
-                lines.append("       Not supported\n");
-                write(fd, lines.string(), lines.size());
+                lines += "       Not supported\n";
+                write(fd, lines.c_str(), lines.size());
             } else {
-                write(fd, lines.string(), lines.size());
+                write(fd, lines.c_str(), lines.size());
                 dump_indented_camera_metadata(templateRequest,
                         fd, /*verbosity*/2, /*indentation*/8);
             }
@@ -650,8 +653,8 @@
     mTagMonitor.dumpMonitoredMetadata(fd);
 
     if (mInterface->valid()) {
-        lines = String8("     HAL device dump:\n");
-        write(fd, lines.string(), lines.size());
+        lines = "     HAL device dump:\n";
+        write(fd, lines.c_str(), lines.size());
         mInterface->dump(fd);
     }
 
@@ -661,7 +664,7 @@
     return OK;
 }
 
-status_t Camera3Device::startWatchingTags(const String8 &tags) {
+status_t Camera3Device::startWatchingTags(const std::string &tags) {
     mTagMonitor.parseTagsToMonitor(tags);
     return OK;
 }
@@ -676,7 +679,7 @@
     return OK;
 }
 
-const CameraMetadata& Camera3Device::infoPhysical(const String8& physicalId) const {
+const CameraMetadata& Camera3Device::infoPhysical(const std::string& physicalId) const {
     ALOGVV("%s: E", __FUNCTION__);
     if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED ||
                     mStatus == STATUS_ERROR)) {
@@ -684,12 +687,11 @@
                 mStatus == STATUS_ERROR ?
                 "when in error state" : "before init");
     }
-    if (physicalId.isEmpty()) {
+    if (physicalId.empty()) {
         return mDeviceInfo;
     } else {
-        std::string id(physicalId.c_str());
-        if (mPhysicalDeviceInfoMap.find(id) != mPhysicalDeviceInfoMap.end()) {
-            return mPhysicalDeviceInfoMap.at(id);
+        if (mPhysicalDeviceInfoMap.find(physicalId) != mPhysicalDeviceInfoMap.end()) {
+            return mPhysicalDeviceInfoMap.at(physicalId);
         } else {
             ALOGE("%s: Invalid physical camera id %s", __FUNCTION__, physicalId.c_str());
             return mDeviceInfo;
@@ -698,8 +700,7 @@
 }
 
 const CameraMetadata& Camera3Device::info() const {
-    String8 emptyId;
-    return infoPhysical(emptyId);
+    return infoPhysical(/*physicalId*/ std::string());
 }
 
 status_t Camera3Device::checkStatusOkToCaptureLocked() {
@@ -793,7 +794,7 @@
         std::list<const SurfaceMap>& surfaceMaps,
         const CameraMetadata& request) {
     PhysicalCameraSettingsList requestList;
-    requestList.push_back({std::string(getId().string()), request});
+    requestList.push_back({getId(), request});
     requestsList.push_back(requestList);
 
     SurfaceMap surfaceMap;
@@ -845,7 +846,7 @@
             SET_ERR_L("Can't transition to active in %f seconds!",
                     kActiveTimeout/1e9);
         }
-        ALOGV("Camera %s: Capture request %" PRId32 " enqueued", mId.string(),
+        ALOGV("Camera %s: Capture request %" PRId32 " enqueued", mId.c_str(),
               (*(requestList.begin()))->mResultExtras.requestId);
     } else {
         CLOGE("Cannot queue request. Impossible.");
@@ -928,7 +929,7 @@
             SET_ERR_L("Unexpected status: %d", mStatus);
             return INVALID_OPERATION;
     }
-    ALOGV("Camera %s: Clearing repeating request", mId.string());
+    ALOGV("Camera %s: Clearing repeating request", mId.c_str());
 
     return mRequestThread->clearRepeatingRequests(lastFrameNumber);
 }
@@ -947,7 +948,7 @@
     nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
     Mutex::Autolock l(mLock);
     ALOGV("Camera %s: Creating new input stream %d: %d x %d, format %d",
-            mId.string(), mNextStreamId, width, height, format);
+            mId.c_str(), mNextStreamId, width, height, format);
 
     status_t res;
     bool wasActive = false;
@@ -1006,14 +1007,14 @@
         internalResumeLocked();
     }
 
-    ALOGV("Camera %s: Created input stream", mId.string());
+    ALOGV("Camera %s: Created input stream", mId.c_str());
     return OK;
 }
 
 status_t Camera3Device::createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
             uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
@@ -1049,7 +1050,8 @@
 status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
         bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-        const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
+        const std::string& physicalCameraId,
+        const std::unordered_set<int32_t> &sensorPixelModesUsed,
         std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
         uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
         int timestampBase, int mirrorMode, int32_t colorSpace, bool useReadoutTimestamp) {
@@ -1062,8 +1064,8 @@
             " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
             " dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
             " mirrorMode %d, colorSpace %d, useReadoutTimestamp %d",
-            mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
-            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
+            mId.c_str(), mNextStreamId, width, height, format, dataSpace, rotation,
+            consumerUsage, isShared, physicalCameraId.c_str(), isMultiResolution,
             dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace,
             useReadoutTimestamp);
 
@@ -1214,7 +1216,7 @@
         }
         internalResumeLocked();
     }
-    ALOGV("Camera %s: Created new stream", mId.string());
+    ALOGV("Camera %s: Created new stream", mId.c_str());
     return OK;
 }
 
@@ -1299,18 +1301,18 @@
     Mutex::Autolock l(mLock);
     status_t res;
 
-    ALOGV("%s: Camera %s: Deleting stream %d", __FUNCTION__, mId.string(), id);
+    ALOGV("%s: Camera %s: Deleting stream %d", __FUNCTION__, mId.c_str(), id);
 
     // CameraDevice semantics require device to already be idle before
     // deleteStream is called, unlike for createStream.
     if (mStatus == STATUS_ACTIVE) {
-        ALOGW("%s: Camera %s: Device not idle", __FUNCTION__, mId.string());
+        ALOGW("%s: Camera %s: Device not idle", __FUNCTION__, mId.c_str());
         return -EBUSY;
     }
 
     if (mStatus == STATUS_ERROR) {
         ALOGW("%s: Camera %s: deleteStream not allowed in ERROR state",
-                __FUNCTION__, mId.string());
+                __FUNCTION__, mId.c_str());
         return -EBUSY;
     }
 
@@ -1502,7 +1504,7 @@
         mRequestTemplateCache[templateId].acquire(rawRequest);
 
         // Override the template request with zoomRatioMapper
-        res = mZoomRatioMappers[mId.c_str()].initZoomRatioInTemplate(
+        res = mZoomRatioMappers[mId].initZoomRatioInTemplate(
                 &mRequestTemplateCache[templateId]);
         if (res != OK) {
             CLOGE("Failed to update zoom ratio for template %d: %s (%d)",
@@ -1555,7 +1557,7 @@
             SET_ERR_L("Unexpected status: %d",mStatus);
             return INVALID_OPERATION;
     }
-    ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.string(),
+    ALOGV("%s: Camera %s: Waiting until idle (%" PRIi64 "ns)", __FUNCTION__, mId.c_str(),
             maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
                            /*requestThreadInvocation*/ false);
@@ -1583,7 +1585,7 @@
         return NO_INIT;
     }
 
-    ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
+    ALOGV("%s: Camera %s: Internal wait until idle (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
           maxExpectedDuration);
     status_t res = waitUntilStateThenRelock(/*active*/ false, maxExpectedDuration,
                            requestThreadInvocation);
@@ -1602,7 +1604,7 @@
 
     mRequestThread->setPaused(false);
 
-    ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.string(),
+    ALOGV("%s: Camera %s: Internal wait until active (% " PRIi64 " ns)", __FUNCTION__, mId.c_str(),
             kActiveTimeout);
     // internalResumeLocked is always called from a binder thread.
     res = waitUntilStateThenRelock(/*active*/ true, kActiveTimeout,
@@ -1819,7 +1821,7 @@
 
 status_t Camera3Device::flush(int64_t *frameNumber) {
     ATRACE_CALL();
-    ALOGV("%s: Camera %s: Flushing all requests", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Flushing all requests", __FUNCTION__, mId.c_str());
     Mutex::Autolock il(mInterfaceLock);
 
     {
@@ -1851,7 +1853,7 @@
 
 status_t Camera3Device::prepare(int maxCount, int streamId) {
     ATRACE_CALL();
-    ALOGV("%s: Camera %s: Preparing stream %d", __FUNCTION__, mId.string(), streamId);
+    ALOGV("%s: Camera %s: Preparing stream %d", __FUNCTION__, mId.c_str(), streamId);
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1876,7 +1878,7 @@
 
 status_t Camera3Device::tearDown(int streamId) {
     ATRACE_CALL();
-    ALOGV("%s: Camera %s: Tearing down stream %d", __FUNCTION__, mId.string(), streamId);
+    ALOGV("%s: Camera %s: Tearing down stream %d", __FUNCTION__, mId.c_str(), streamId);
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1897,7 +1899,8 @@
 status_t Camera3Device::addBufferListenerForStream(int streamId,
         wp<Camera3StreamBufferListener> listener) {
     ATRACE_CALL();
-    ALOGV("%s: Camera %s: Adding buffer listener for stream %d", __FUNCTION__, mId.string(), streamId);
+    ALOGV("%s: Camera %s: Adding buffer listener for stream %d", __FUNCTION__, mId.c_str(),
+            streamId);
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
@@ -1945,7 +1948,7 @@
         if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) {
             return;
         }
-        ALOGV("%s: Camera %s: Now %s, pauseState: %s", __FUNCTION__, mId.string(),
+        ALOGV("%s: Camera %s: Now %s, pauseState: %s", __FUNCTION__, mId.c_str(),
                 idle ? "idle" : "active", mPauseStateNotify ? "true" : "false");
         internalUpdateStatusLocked(idle ? STATUS_CONFIGURED : STATUS_ACTIVE);
 
@@ -2026,7 +2029,7 @@
         const std::vector<sp<Surface>>& consumers, std::vector<int> *surfaceIds) {
     ATRACE_CALL();
     ALOGV("%s: Camera %s: set consumer surface for stream %d",
-            __FUNCTION__, mId.string(), streamId);
+            __FUNCTION__, mId.c_str(), streamId);
 
     if (surfaceIds == nullptr) {
         return BAD_VALUE;
@@ -2306,7 +2309,7 @@
 
     res = mPreparerThread->resume();
     if (res != OK) {
-        ALOGE("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.string());
+        ALOGE("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.c_str());
     }
 }
 
@@ -2466,7 +2469,7 @@
     overrideStreamUseCaseLocked();
 
     // Start configuring the streams
-    ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.c_str());
 
     mPreparerThread->pause();
 
@@ -2522,7 +2525,7 @@
                      static_cast<android_dataspace_t>(
                          aidl::android::hardware::graphics::common::Dataspace::JPEG_R))) {
                 bufferSizes[k] = static_cast<uint32_t>(
-                        getJpegBufferSize(infoPhysical(String8(outputStream->physical_camera_id)),
+                        getJpegBufferSize(infoPhysical(outputStream->physical_camera_id),
                                 outputStream->width, outputStream->height));
             } else if (outputStream->data_space ==
                     static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
@@ -2535,7 +2538,7 @@
 
         if (mOutputStreams[i]->isMultiResolution()) {
             int32_t streamGroupId = mOutputStreams[i]->getHalStreamGroupId();
-            const String8& physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
+            const std::string &physicalCameraId = mOutputStreams[i]->getPhysicalCameraId();
             mGroupIdPhysicalCameraMap[streamGroupId].insert(physicalCameraId);
         }
 
@@ -2625,8 +2628,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = requestPriority(getpid(), requestThreadTid,
-                kRequestThreadPriority, /*isForApp*/ false, /*asynchronous*/ false);
+        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
+                kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -2650,14 +2653,14 @@
     internalUpdateStatusLocked((mFakeStreamId == NO_STREAM) ?
             STATUS_CONFIGURED : STATUS_UNCONFIGURED);
 
-    ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.c_str());
 
     // tear down the deleted streams after configure streams.
     mDeletedStreams.clear();
 
     auto rc = mPreparerThread->resume();
     if (rc != OK) {
-        SET_ERR_L("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.string());
+        SET_ERR_L("%s: Camera %s: Preparer thread failed to resume!", __FUNCTION__, mId.c_str());
         return rc;
     }
 
@@ -2671,7 +2674,7 @@
     // configure the injection streams.
     if (mInjectionMethods->isInjecting()) {
         ALOGD("%s: Injection camera %s: Start to configure streams.",
-              __FUNCTION__, mInjectionMethods->getInjectedCamId().string());
+              __FUNCTION__, mInjectionMethods->getInjectedCamId().c_str());
         res = mInjectionMethods->injectCamera(config, bufferSizes);
         if (res != OK) {
             ALOGE("Can't finish inject camera process!");
@@ -2700,11 +2703,11 @@
         // Should never be adding a second fake stream when one is already
         // active
         SET_ERR_L("%s: Camera %s: A fake stream already exists!",
-                __FUNCTION__, mId.string());
+                __FUNCTION__, mId.c_str());
         return INVALID_OPERATION;
     }
 
-    ALOGV("%s: Camera %s: Adding a fake stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Adding a fake stream", __FUNCTION__, mId.c_str());
 
     sp<Camera3OutputStreamInterface> fakeStream =
             new Camera3FakeStream(mNextStreamId);
@@ -2728,7 +2731,7 @@
     if (mFakeStreamId == NO_STREAM) return OK;
     if (mOutputStreams.size() == 1) return OK;
 
-    ALOGV("%s: Camera %s: Removing the fake stream", __FUNCTION__, mId.string());
+    ALOGV("%s: Camera %s: Removing the fake stream", __FUNCTION__, mId.c_str());
 
     // Ok, have a fake stream and there's at least one other output stream,
     // so remove the fake
@@ -2780,8 +2783,9 @@
 
 void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) {
     // Print out all error messages to log
-    String8 errorCause = String8::formatV(fmt, args);
-    ALOGE("Camera %s: %s", mId.string(), errorCause.string());
+    std::string errorCause;
+    base::StringAppendV(&errorCause, fmt, args);
+    ALOGE("Camera %s: %s", mId.c_str(), errorCause.c_str());
 
     // But only do error state transition steps for the first error
     if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
@@ -2813,7 +2817,7 @@
 status_t Camera3Device::registerInFlight(uint32_t frameNumber,
         int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
         bool hasAppCallback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
-        bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
+        bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
         bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
         const std::set<std::string>& cameraIdsWithZoom,
         const SurfaceMap& outputSurfaces, nsecs_t requestTimeNs) {
@@ -3065,7 +3069,7 @@
 
 void Camera3Device::RequestThread::configurationComplete(bool isConstrainedHighSpeed,
         const CameraMetadata& sessionParams,
-        const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap) {
+        const std::map<int32_t, std::set<std::string>>& groupIdPhysicalCameraMap) {
     ATRACE_CALL();
     Mutex::Autolock l(mRequestLock);
     mReconfigured = true;
@@ -3118,8 +3122,8 @@
     return OK;
 }
 
-const String8& Camera3Device::RequestThread::getId(const wp<Camera3Device> &device) {
-    static String8 deadId("<DeadDevice>");
+const std::string& Camera3Device::RequestThread::getId(const wp<Camera3Device> &device) {
+    static std::string deadId("<DeadDevice>");
     sp<Camera3Device> d = device.promote();
     if (d != nullptr) return d->mId;
     return deadId;
@@ -3929,7 +3933,7 @@
         outputBuffers->insertAt(camera_stream_buffer_t(), 0,
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
-        std::set<std::set<String8>> requestedPhysicalCameras;
+        std::set<std::set<std::string>> requestedPhysicalCameras;
 
         sp<Camera3Device> parent = mParent.promote();
         if (parent == NULL) {
@@ -4010,14 +4014,14 @@
             {
                 sp<Camera3Device> parent = mParent.promote();
                 if (parent != nullptr) {
-                    const String8& streamCameraId = outputStream->getPhysicalCameraId();
+                    const std::string& streamCameraId = outputStream->getPhysicalCameraId();
                     // Consider the case where clients are sending a single logical camera request
                     // to physical output/outputs
                     bool singleRequest = captureRequest->mSettingsList.size() == 1;
                     for (const auto& settings : captureRequest->mSettingsList) {
-                        if (((streamCameraId.isEmpty() || singleRequest) &&
-                                parent->getId() == settings.cameraId.c_str()) ||
-                                streamCameraId == settings.cameraId.c_str()) {
+                        if (((streamCameraId.empty() || singleRequest) &&
+                                parent->getId() == settings.cameraId) ||
+                                streamCameraId == settings.cameraId) {
                             outputStream->fireBufferRequestForFrameNumber(
                                     captureRequest->mResultExtras.frameNumber,
                                     settings.metadata);
@@ -4026,12 +4030,12 @@
                 }
             }
 
-            String8 physicalCameraId = outputStream->getPhysicalCameraId();
+            const std::string &physicalCameraId = outputStream->getPhysicalCameraId();
             int32_t streamGroupId = outputStream->getHalStreamGroupId();
             if (streamGroupId != -1 && mGroupIdPhysicalCameraMap.count(streamGroupId) == 1) {
                 requestedPhysicalCameras.insert(mGroupIdPhysicalCameraMap[streamGroupId]);
-            } else if (!physicalCameraId.isEmpty()) {
-                requestedPhysicalCameras.insert(std::set<String8>({physicalCameraId}));
+            } else if (!physicalCameraId.empty()) {
+                requestedPhysicalCameras.insert(std::set<std::string>({physicalCameraId}));
             }
             halRequest->num_output_buffers++;
         }
@@ -5466,9 +5470,9 @@
     return mRequestThread->setZoomOverride(zoomOverride);
 }
 
-status_t Camera3Device::injectCamera(const String8& injectedCamId,
+status_t Camera3Device::injectCamera(const std::string& injectedCamId,
                                      sp<CameraProviderManager> manager) {
-    ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.string());
+    ALOGI("%s Injection camera: injectedCamId = %s", __FUNCTION__, injectedCamId.c_str());
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     // When the camera device is active, injectCamera() and stopInjection() will call
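The setErrorStateLockedV() hunk above replaces String8::formatV with android::base::StringAppendV for collecting the error cause. A minimal standalone sketch of that pattern, assuming only libbase's <android-base/stringprintf.h>; the helper name and the fprintf sink are illustrative and not part of the patch (the real code logs through ALOGE):

    #include <cstdarg>
    #include <cstdio>
    #include <string>

    #include <android-base/stringprintf.h>

    // Formats a printf-style message into a std::string the way the new
    // errorCause handling does, then prints it (stderr stands in for ALOGE).
    static void logErrorCause(const std::string& cameraId, const char* fmt, ...) {
        va_list args;
        va_start(args, fmt);
        std::string errorCause;
        android::base::StringAppendV(&errorCause, fmt, args);  // append formatted text
        va_end(args);
        std::fprintf(stderr, "Camera %s: %s\n", cameraId.c_str(), errorCause.c_str());
    }
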
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index aa1d55a..a1e25fd 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -85,7 +85,7 @@
   public:
 
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~Camera3Device();
@@ -101,7 +101,7 @@
      * CameraDeviceBase interface
      */
 
-    const String8& getId() const override;
+    const std::string& getId() const override;
 
     metadata_vendor_id_t getVendorTagId() const override { return mVendorTagId; }
 
@@ -110,15 +110,15 @@
 
     // Transitions to idle state on success.
     virtual status_t initialize(sp<CameraProviderManager> /*manager*/,
-            const String8& /*monitorTags*/) = 0;
+            const std::string& /*monitorTags*/) = 0;
 
     status_t disconnect() override;
     status_t dump(int fd, const Vector<String16> &args) override;
-    status_t startWatchingTags(const String8 &tags) override;
+    status_t startWatchingTags(const std::string &tags) override;
     status_t stopWatchingTags() override;
     status_t dumpWatchedEventsToVector(std::vector<std::string> &out) override;
     const CameraMetadata& info() const override;
-    const CameraMetadata& infoPhysical(const String8& physicalId) const override;
+    const CameraMetadata& infoPhysical(const std::string& physicalId) const override;
     bool isCompositeJpegRDisabled() const override { return mIsCompositeJpegRDisabled; };
 
     // Capture and setStreamingRequest will configure streams if currently in
@@ -144,7 +144,7 @@
     status_t createStream(sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
@@ -162,7 +162,7 @@
     status_t createStream(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             std::vector<int> *surfaceIds = nullptr,
             int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
@@ -330,7 +330,7 @@
      * The injection camera session to replace the internal camera
      * session.
      */
-    status_t injectCamera(const String8& injectedCamId,
+    status_t injectCamera(const std::string& injectedCamId,
                           sp<CameraProviderManager> manager);
 
     /**
@@ -376,7 +376,7 @@
     Mutex                      mLock;
 
     // Camera device ID
-    const String8              mId;
+    const std::string          mId;
 
     // Legacy camera client flag
     bool                       mLegacyClient;
@@ -586,14 +586,14 @@
     Condition                  mStatusChanged;
 
     // Tracking cause of fatal errors when in STATUS_ERROR
-    String8                    mErrorCause;
+    std::string                mErrorCause;
 
     camera3::StreamSet         mOutputStreams;
     sp<camera3::Camera3Stream> mInputStream;
     bool                       mIsInputStreamMultiResolution;
     SessionStatsBuilder        mSessionStatsBuilder;
     // Map from stream group ID to physical cameras backing the stream group
-    std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+    std::map<int32_t, std::set<std::string>> mGroupIdPhysicalCameraMap;
 
     int                        mNextStreamId;
     bool                       mNeedConfig;
@@ -709,7 +709,7 @@
      */
     virtual CameraMetadata getLatestRequestLocked();
 
-    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+    virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) = 0;
 
     /**
@@ -891,7 +891,7 @@
          */
         void     configurationComplete(bool isConstrainedHighSpeed,
                 const CameraMetadata& sessionParams,
-                const std::map<int32_t, std::set<String8>>& groupIdPhysicalCameraMap);
+                const std::map<int32_t, std::set<std::string>>& groupIdPhysicalCameraMap);
 
         /**
          * Set or clear the list of repeating requests. Does not block
@@ -996,7 +996,7 @@
 
         virtual bool threadLoop();
 
-        static const String8& getId(const wp<Camera3Device> &device);
+        static const std::string& getId(const wp<Camera3Device> &device);
 
         status_t           queueTriggerLocked(RequestTrigger trigger);
         // Mix-in queued triggers into this request
@@ -1102,7 +1102,7 @@
 
         wp<NotificationListener> mListener;
 
-        const String8      mId;       // The camera ID
+        const std::string  mId;       // The camera ID
         int                mStatusId; // The RequestThread's component ID for
                                       // status tracking
 
@@ -1174,7 +1174,7 @@
         Vector<int32_t>    mSessionParamKeys;
         CameraMetadata     mLatestSessionParams;
 
-        std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
+        std::map<int32_t, std::set<std::string>> mGroupIdPhysicalCameraMap;
 
         const bool         mUseHalBufManager;
         const bool         mSupportCameraMute;
@@ -1210,7 +1210,7 @@
     status_t registerInFlight(uint32_t frameNumber,
             int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,
             bool callback, nsecs_t minExpectedDuration, nsecs_t maxExpectedDuration,
-            bool isFixedFps, const std::set<std::set<String8>>& physicalCameraIds,
+            bool isFixedFps, const std::set<std::set<std::string>>& physicalCameraIds,
             bool isStillCapture, bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
             const std::set<std::string>& cameraIdsWithZoom, const SurfaceMap& outputSurfaces,
             nsecs_t requestTimeNs);
@@ -1504,7 +1504,7 @@
 
         bool isStreamConfigCompleteButNotInjected();
 
-        const String8& getInjectedCamId() const;
+        const std::string& getInjectedCamId() const;
 
         void getInjectionConfig(/*out*/ camera3::camera_stream_configuration* injectionConfig,
                 /*out*/ std::vector<uint32_t>* injectionBufferSizes);
@@ -1558,7 +1558,7 @@
         Mutex mInjectionLock;
 
         // The injection camera ID.
-        String8 mInjectedCamId;
+        std::string mInjectedCamId;
     };
 
     virtual sp<Camera3DeviceInjectionMethods>
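Camera3Device.h now keys the stream-group map by std::string instead of String8. A tiny sketch of how that container is populated, mirroring the mGroupIdPhysicalCameraMap insertions in the .cpp hunks above; the ids below are made-up example values:

    #include <cstdint>
    #include <map>
    #include <set>
    #include <string>

    int main() {
        // Map from stream group ID to the physical cameras backing that group.
        std::map<int32_t, std::set<std::string>> groupIdPhysicalCameraMap;
        groupIdPhysicalCameraMap[0].insert("2");  // group 0 backed by physical camera "2"
        groupIdPhysicalCameraMap[0].insert("3");  // ...and by physical camera "3"
        return groupIdPhysicalCameraMap[0].size() == 2 ? 0 : 1;
    }
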
diff --git a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
index ab772d4..b0e4ca3 100644
--- a/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DeviceInjectionMethods.cpp
@@ -157,7 +157,7 @@
     return mIsStreamConfigCompleteButNotInjected;
 }
 
-const String8& Camera3Device::Camera3DeviceInjectionMethods::getInjectedCamId()
+const std::string& Camera3Device::Camera3DeviceInjectionMethods::getInjectedCamId()
         const {
     return mInjectedCamId;
 }
@@ -206,7 +206,7 @@
 
     // Start configuring the streams
     ALOGV("%s: Injection camera %s: Starting stream configuration", __FUNCTION__,
-            mInjectedCamId.string());
+            mInjectedCamId.c_str());
 
     parent->mPreparerThread->pause();
 
@@ -249,12 +249,12 @@
     parent->internalUpdateStatusLocked(STATUS_CONFIGURED);
 
     ALOGV("%s: Injection camera %s: Stream configuration complete", __FUNCTION__,
-            mInjectedCamId.string());
+            mInjectedCamId.c_str());
 
     auto rc = parent->mPreparerThread->resume();
     if (rc != OK) {
         ALOGE("%s: Injection camera %s: Preparer thread failed to resume!",
-                 __FUNCTION__, mInjectedCamId.string());
+                 __FUNCTION__, mInjectedCamId.c_str());
         return rc;
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
index 8c0ac71..75162bf 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.cpp
@@ -20,13 +20,14 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 #include "Camera3FakeStream.h"
 
 namespace android {
 
 namespace camera3 {
 
-const String8 Camera3FakeStream::FAKE_ID;
+const std::string Camera3FakeStream::FAKE_ID;
 
 Camera3FakeStream::Camera3FakeStream(int id) :
         Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, FAKE_WIDTH, FAKE_HEIGHT,
@@ -68,9 +69,9 @@
 }
 
 void Camera3FakeStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
-    String8 lines;
-    lines.appendFormat("    Stream[%d]: Fake\n", mId);
-    write(fd, lines.string(), lines.size());
+    std::string lines;
+    lines += fmt::sprintf("    Stream[%d]: Fake\n", mId);
+    write(fd, lines.c_str(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
 }
@@ -115,7 +116,7 @@
     return OK;
 }
 
-const String8& Camera3FakeStream::getPhysicalCameraId() const {
+const std::string& Camera3FakeStream::getPhysicalCameraId() const {
     return FAKE_ID;
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3FakeStream.h b/services/camera/libcameraservice/device3/Camera3FakeStream.h
index 1e9f478..1d82190 100644
--- a/services/camera/libcameraservice/device3/Camera3FakeStream.h
+++ b/services/camera/libcameraservice/device3/Camera3FakeStream.h
@@ -65,7 +65,7 @@
     /**
      * Query the physical camera id for the output stream.
      */
-    virtual const String8& getPhysicalCameraId() const override;
+    virtual const std::string& getPhysicalCameraId() const override;
 
     /**
      * Return if this output stream is for video encoding.
@@ -130,7 +130,7 @@
     static const android_dataspace FAKE_DATASPACE = HAL_DATASPACE_UNKNOWN;
     static const camera_stream_rotation_t FAKE_ROTATION = CAMERA_STREAM_ROTATION_0;
     static const uint64_t FAKE_USAGE = GRALLOC_USAGE_HW_COMPOSER;
-    static const String8 FAKE_ID;
+    static const std::string FAKE_ID;
 
     /**
      * Internal Camera3Stream interface
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index fbaaf7b..c59138c 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -18,10 +18,13 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <sstream>
+
 #include <inttypes.h>
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 #include "device3/Camera3IOStreamBase.h"
 #include "device3/StatusTracker.h"
 
@@ -32,7 +35,7 @@
 Camera3IOStreamBase::Camera3IOStreamBase(int id, camera_stream_type_t type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId,
+        const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
         bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
@@ -75,32 +78,34 @@
 }
 
 void Camera3IOStreamBase::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
-    String8 lines;
+    std::ostringstream lines;
 
     uint64_t consumerUsage = 0;
     status_t res = getEndpointUsage(&consumerUsage);
     if (res != OK) consumerUsage = 0;
 
-    lines.appendFormat("      State: %d\n", mState);
-    lines.appendFormat("      Dims: %d x %d, format 0x%x, dataspace 0x%x\n",
+    lines << fmt::sprintf("      State: %d\n", mState);
+    lines << fmt::sprintf("      Dims: %d x %d, format 0x%x, dataspace 0x%x\n",
             camera_stream::width, camera_stream::height,
             camera_stream::format, camera_stream::data_space);
-    lines.appendFormat("      Max size: %zu\n", mMaxSize);
-    lines.appendFormat("      Combined usage: 0x%" PRIx64 ", max HAL buffers: %d\n",
+    lines << fmt::sprintf("      Max size: %zu\n", mMaxSize);
+    lines << fmt::sprintf("      Combined usage: 0x%" PRIx64 ", max HAL buffers: %d\n",
             mUsage | consumerUsage, camera_stream::max_buffers);
-    if (strlen(camera_stream::physical_camera_id) > 0) {
-        lines.appendFormat("      Physical camera id: %s\n", camera_stream::physical_camera_id);
+    if (!camera_stream::physical_camera_id.empty()) {
+        lines << "      Physical camera id: " << camera_stream::physical_camera_id << "\n";
     }
-    lines.appendFormat("      Dynamic Range Profile: 0x%" PRIx64 "\n",
+    lines << fmt::sprintf("      Dynamic Range Profile: 0x%" PRIx64 "\n",
             camera_stream::dynamic_range_profile);
-    lines.appendFormat("      Color Space: %d\n", camera_stream::color_space);
-    lines.appendFormat("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
-    lines.appendFormat("      Timestamp base: %d\n", getTimestampBase());
-    lines.appendFormat("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
+    lines << fmt::sprintf("      Color Space: %d\n", camera_stream::color_space);
+    lines << fmt::sprintf("      Stream use case: %" PRId64 "\n", camera_stream::use_case);
+    lines << fmt::sprintf("      Timestamp base: %d\n", getTimestampBase());
+    lines << fmt::sprintf("      Frames produced: %d, last timestamp: %" PRId64 " ns\n",
             mFrameCount, mLastTimestamp);
-    lines.appendFormat("      Total buffers: %zu, currently dequeued: %zu, currently cached: %zu\n",
-            mTotalBufferCount, mHandoutTotalBufferCount, mCachedOutputBufferCount);
-    write(fd, lines.string(), lines.size());
+    lines << fmt::sprintf("      Total buffers: %zu, currently dequeued: %zu, "
+            "currently cached: %zu\n", mTotalBufferCount, mHandoutTotalBufferCount,
+            mCachedOutputBufferCount);
+    std::string linesStr = lines.str();
+    write(fd, linesStr.c_str(), linesStr.size());
 
     Camera3Stream::dump(fd, args);
 }
@@ -268,7 +273,7 @@
     // carry on
 
     if (releaseFence != 0) {
-        mCombinedFence = Fence::merge(mName, mCombinedFence, releaseFence);
+        mCombinedFence = Fence::merge(toString8(mName), mCombinedFence, releaseFence);
     }
 
     if (output) {
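Camera3IOStreamBase::dump() above switches from a String8 accumulator to std::ostringstream plus fmt::sprintf, collecting the formatted fragments and writing them with a single write(). A condensed sketch of that structure with placeholder fields, assuming fmtlib's printf-compatible header <fmt/printf.h> is available; the function name and parameters are illustrative:

    #include <cstdint>
    #include <sstream>
    #include <string>
    #include <unistd.h>

    #include <fmt/printf.h>

    // Accumulates several formatted fragments in a stream, then writes once.
    static void dumpStreamState(int fd, int state, uint32_t width, uint32_t height) {
        std::ostringstream lines;
        lines << fmt::sprintf("      State: %d\n", state);
        lines << fmt::sprintf("      Dims: %u x %u\n", width, height);
        const std::string linesStr = lines.str();
        write(fd, linesStr.c_str(), linesStr.size());
    }
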
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 6af0875..239fc71 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -35,7 +35,7 @@
     Camera3IOStreamBase(int id, camera_stream_type_t type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 631bb43..54ffbd7 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -21,13 +21,14 @@
 #include <gui/BufferItem.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 #include "Camera3InputStream.h"
 
 namespace android {
 
 namespace camera3 {
 
-const String8 Camera3InputStream::FAKE_ID;
+const std::string Camera3InputStream::FAKE_ID;
 
 Camera3InputStream::Camera3InputStream(int id,
         uint32_t width, uint32_t height, int format) :
@@ -216,9 +217,9 @@
 }
 
 void Camera3InputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
-    String8 lines;
-    lines.appendFormat("    Stream[%d]: Input\n", mId);
-    write(fd, lines.string(), lines.size());
+    std::string lines;
+    lines += fmt::sprintf("    Stream[%d]: Input\n", mId);
+    write(fd, lines.c_str(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
 }
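The dump() methods in Camera3FakeStream.cpp and Camera3InputStream.cpp now build their output with fmt::sprintf (fmtlib's printf-compatible API returning std::string) instead of String8::appendFormat. A minimal sketch of that pattern, again assuming <fmt/printf.h>; the function name and stream id are illustrative:

    #include <string>
    #include <unistd.h>

    #include <fmt/printf.h>

    // Builds one formatted dump line in a std::string and writes it to fd,
    // matching the shape of the migrated dump() methods.
    static void dumpInputStreamHeader(int fd, int streamId) {
        std::string lines;
        lines += fmt::sprintf("    Stream[%d]: Input\n", streamId);
        write(fd, lines.c_str(), lines.size());
    }
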
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 5e0587b..d4f4b15 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -53,7 +53,7 @@
     sp<IGraphicBufferProducer> mProducer;
     Vector<BufferItem> mBuffersInFlight;
 
-    static const String8 FAKE_ID;
+    static const std::string FAKE_ID;
 
     /**
      * Camera3IOStreamBase
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
index 1e7bd57..172b62a 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.cpp
@@ -27,7 +27,9 @@
 
 #include <inttypes.h>
 
+#include <android-base/stringprintf.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
@@ -42,7 +44,7 @@
 
 namespace android {
 
-Camera3OfflineSession::Camera3OfflineSession(const String8 &id,
+Camera3OfflineSession::Camera3OfflineSession(const std::string &id,
         const sp<camera3::Camera3Stream>& inputStream,
         const camera3::StreamSet& offlineStreamSet,
         camera3::BufferRecords&& bufferRecords,
@@ -75,15 +77,15 @@
         mRotateAndCropMappers(offlineStates.mRotateAndCropMappers),
         mStatus(STATUS_UNINITIALIZED) {
     ATRACE_CALL();
-    ALOGV("%s: Created offline session for camera %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Created offline session for camera %s", __FUNCTION__, mId.c_str());
 }
 
 Camera3OfflineSession::~Camera3OfflineSession() {
     ATRACE_CALL();
-    ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Tearing down offline session for camera id %s", __FUNCTION__, mId.c_str());
 }
 
-const String8& Camera3OfflineSession::getId() const {
+const std::string& Camera3OfflineSession::getId() const {
     return mId;
 }
 
@@ -109,7 +111,7 @@
             return OK; // don't close twice
         } else if (mStatus == STATUS_ERROR) {
             ALOGE("%s: offline session %s shutting down in error state",
-                    __FUNCTION__, mId.string());
+                    __FUNCTION__, mId.c_str());
         }
         listener = mListener.promote();
     }
@@ -217,8 +219,9 @@
 
 void Camera3OfflineSession::setErrorStateLockedV(const char *fmt, va_list args) {
     // Print out all error messages to log
-    String8 errorCause = String8::formatV(fmt, args);
-    ALOGE("Camera %s: %s", mId.string(), errorCause.string());
+    std::string errorCause;
+    base::StringAppendV(&errorCause, fmt, args);
+    ALOGE("Camera %s: %s", mId.c_str(), errorCause.c_str());
 
     // But only do error state transition steps for the first error
     if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
diff --git a/services/camera/libcameraservice/device3/Camera3OfflineSession.h b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
index e780043..b5fd486 100644
--- a/services/camera/libcameraservice/device3/Camera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/Camera3OfflineSession.h
@@ -20,9 +20,6 @@
 #include <memory>
 #include <mutex>
 
-#include <utils/String8.h>
-#include <utils/String16.h>
-
 #include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
 
 #include <fmq/MessageQueue.h>
@@ -138,7 +135,7 @@
   public:
 
     // initialize by Camera3Device.
-    explicit Camera3OfflineSession(const String8& id,
+    explicit Camera3OfflineSession(const std::string& id,
             const sp<camera3::Camera3Stream>& inputStream,
             const camera3::StreamSet& offlineStreamSet,
             camera3::BufferRecords&& bufferRecords,
@@ -158,7 +155,7 @@
     /**
      * FrameProducer interface
      */
-    const String8& getId() const override;
+    const std::string& getId() const override;
     const CameraMetadata& info() const override;
     status_t waitForNextFrame(nsecs_t timeout) override;
     status_t getNextResult(CaptureResult *frame) override;
@@ -171,7 +168,7 @@
 
   protected:
     // Camera device ID
-    const String8 mId;
+    const std::string mId;
     sp<camera3::Camera3Stream> mInputStream;
     camera3::StreamSet mOutputStreams;
     camera3::BufferRecords mBufferRecords;
@@ -236,7 +233,7 @@
     std::mutex mProcessCaptureResultLock;
 
     // Tracking cause of fatal errors when in STATUS_ERROR
-    String8 mErrorCause;
+    std::string mErrorCause;
 
     // Lock to ensure requestStreamBuffers() callbacks are serialized
     std::mutex mRequestBufferInterfaceLock;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index aaf5d8d..f98636b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -31,6 +31,7 @@
 #include <ui/GraphicBuffer.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 
 #include <common/CameraDeviceBase.h>
 #include "api1/client2/JpegProcessor.h"
@@ -53,7 +54,7 @@
         sp<Surface> consumer,
         uint32_t width, uint32_t height, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId,
+        nsecs_t timestampOffset, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
@@ -88,7 +89,7 @@
         sp<Surface> consumer,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId,
+        nsecs_t timestampOffset, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
@@ -128,7 +129,7 @@
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation, nsecs_t timestampOffset,
-        const String8& physicalCameraId,
+        const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
@@ -164,7 +165,7 @@
         mState = STATE_ERROR;
     }
 
-    mConsumerName = String8("Deferred");
+    mConsumerName = "Deferred";
     bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
     mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
 }
@@ -174,7 +175,7 @@
                                          int format,
                                          android_dataspace dataSpace,
                                          camera_stream_rotation_t rotation,
-                                         const String8& physicalCameraId,
+                                         const std::string& physicalCameraId,
                                          const std::unordered_set<int32_t> &sensorPixelModesUsed,
                                          IPCTransport transport,
                                          uint64_t consumerUsage, nsecs_t timestampOffset,
@@ -523,10 +524,10 @@
 }
 
 void Camera3OutputStream::dump(int fd, [[maybe_unused]] const Vector<String16> &args) const {
-    String8 lines;
-    lines.appendFormat("    Stream[%d]: Output\n", mId);
-    lines.appendFormat("      Consumer name: %s\n", mConsumerName.string());
-    write(fd, lines.string(), lines.size());
+    std::string lines;
+    lines += fmt::sprintf("    Stream[%d]: Output\n", mId);
+    lines += fmt::sprintf("      Consumer name: %s\n", mConsumerName);
+    write(fd, lines.c_str(), lines.size());
 
     Camera3IOStreamBase::dump(fd, args);
 
@@ -710,7 +711,8 @@
             // service. So update mMaxCachedBufferCount.
             mMaxCachedBufferCount = 1;
             mTotalBufferCount += mMaxCachedBufferCount;
-            res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
+            res = mPreviewFrameSpacer->run((std::string("PreviewSpacer-")
+                    + std::to_string(mId)).c_str());
             if (res != OK) {
                 ALOGE("%s: Unable to start preview spacer: %s (%d)", __FUNCTION__,
                         strerror(-res), res);
@@ -1234,7 +1236,7 @@
     return OK;
 }
 
-const String8& Camera3OutputStream::getPhysicalCameraId() const {
+const std::string& Camera3OutputStream::getPhysicalCameraId() const {
     Mutex::Autolock l(mLock);
     return physicalCameraId();
 }
@@ -1338,7 +1340,7 @@
     // Output image data to file
     std::string filePath = "/data/misc/cameraserver/";
     filePath += imageFileName;
-    std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
+    std::ofstream imageFile(filePath, std::ofstream::binary);
     if (!imageFile.is_open()) {
         ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
         graphicBuffer->unlock();
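In Camera3OutputStream.cpp the preview-spacer thread name is now assembled with std::to_string rather than String8::format before being handed to Thread::run(). A small sketch of the equivalent construction; the helper and the commented call site are hypothetical, not part of the patch:

    #include <string>

    // Builds the "PreviewSpacer-<id>" name used when starting the spacer thread.
    static std::string previewSpacerName(int streamId) {
        return std::string("PreviewSpacer-") + std::to_string(streamId);
    }
    // Hypothetical call site: mPreviewFrameSpacer->run(previewSpacerName(mId).c_str());
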
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index ebd5797..65791a9 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -89,7 +89,7 @@
     Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            nsecs_t timestampOffset, const String8& physicalCameraId,
+            nsecs_t timestampOffset, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
@@ -108,7 +108,7 @@
     Camera3OutputStream(int id, sp<Surface> consumer,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            nsecs_t timestampOffset, const String8& physicalCameraId,
+            nsecs_t timestampOffset, const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
@@ -126,7 +126,7 @@
     Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
             int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
@@ -214,7 +214,7 @@
     /**
      * Query the physical camera id for the output stream.
      */
-    virtual const String8& getPhysicalCameraId() const override;
+    virtual const std::string& getPhysicalCameraId() const override;
 
     /**
      * Set the graphic buffer manager to get/return the stream buffers.
@@ -276,7 +276,7 @@
     Camera3OutputStream(int id, camera_stream_type_t type,
             uint32_t width, uint32_t height, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
             int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
@@ -327,7 +327,7 @@
     bool mTraceFirstBuffer;
 
     // Name of Surface consumer
-    String8           mConsumerName;
+    std::string           mConsumerName;
 
     /**
      * GraphicBuffer manager this stream is registered to. Used to replace the buffer
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 4baa7e8..1ab8162 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -94,7 +94,7 @@
     /**
      * Query the physical camera id for the output stream.
      */
-    virtual const String8& getPhysicalCameraId() const = 0;
+    virtual const std::string& getPhysicalCameraId() const = 0;
 
     /**
      * Set the batch size for buffer operations. The output stream will request
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index f742a6d..88a7d69 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -43,6 +43,7 @@
 #include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>
 
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 #include <camera_metadata_hidden.h>
 
 #include "device3/Camera3OutputUtils.h"
@@ -249,18 +250,18 @@
     // and RotationAndCrop mappers.
     std::set<uint32_t> keysToRemove;
 
-    auto iter = states.distortionMappers.find(states.cameraId.c_str());
+    auto iter = states.distortionMappers.find(states.cameraId);
     if (iter != states.distortionMappers.end()) {
         const auto& remappedKeys = iter->second.getRemappedKeys();
         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
     }
 
-    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
+    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId].getRemappedKeys();
     keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
 
-    auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
+    auto mapper = states.rotateAndCropMappers.find(states.cameraId);
     if (mapper != states.rotateAndCropMappers.end()) {
-        const auto& remappedKeys = iter->second.getRemappedKeys();
+        const auto& remappedKeys = mapper->second.getRemappedKeys();
         keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
     }
 
@@ -342,14 +343,14 @@
                 physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
         if (timestamp.count == 0) {
             SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
-                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
+                    physicalMetadata.mPhysicalCameraId.c_str(), frameNumber);
             return;
         }
     }
 
     // Fix up some result metadata to account for HAL-level distortion correction
     status_t res = OK;
-    auto iter = states.distortionMappers.find(states.cameraId.c_str());
+    auto iter = states.distortionMappers.find(states.cameraId);
     if (iter != states.distortionMappers.end()) {
         res = iter->second.correctCaptureResult(&captureResult.mMetadata);
         if (res != OK) {
@@ -361,8 +362,8 @@
 
     // Fix up result metadata to account for zoom ratio availabilities between
     // HAL and app.
-    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
-    res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
+    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId) == cameraIdsWithZoom.end();
+    res = states.zoomRatioMappers[states.cameraId].updateCaptureResult(
             &captureResult.mMetadata, zoomRatioIs1);
     if (res != OK) {
         SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
@@ -372,7 +373,7 @@
 
     // Fix up result metadata to account for rotateAndCrop in AUTO mode
     if (rotateAndCropAuto) {
-        auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
+        auto mapper = states.rotateAndCropMappers.find(states.cameraId);
         if (mapper != states.rotateAndCropMappers.end()) {
             res = mapper->second.updateCaptureResult(
                     &captureResult.mMetadata);
@@ -401,8 +402,8 @@
     }
 
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
-        auto mapper = states.distortionMappers.find(cameraId8.c_str());
+        const std::string cameraId = physicalMetadata.mPhysicalCameraId;
+        auto mapper = states.distortionMappers.find(cameraId);
         if (mapper != states.distortionMappers.end()) {
             res = mapper->second.correctCaptureResult(
                     &physicalMetadata.mPhysicalCameraMetadata);
@@ -413,12 +414,12 @@
             }
         }
 
-        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
-        res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
+        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId) == cameraIdsWithZoom.end();
+        res = states.zoomRatioMappers[cameraId].updateCaptureResult(
                 &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
         if (res != OK) {
             SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
-                    "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
+                    "frame %d: %s(%d)", cameraId.c_str(), frameNumber, strerror(-res), res);
             return;
         }
     }
@@ -430,9 +431,9 @@
         return;
     }
     for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
-        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
+        const std::string &cameraId = physicalMetadata.mPhysicalCameraId;
         res = fixupMonochromeTags(states,
-                states.physicalDeviceInfoMap.at(cameraId8.c_str()),
+                states.physicalDeviceInfoMap.at(cameraId),
                 physicalMetadata.mPhysicalCameraMetadata);
         if (res != OK) {
             SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
@@ -442,7 +443,7 @@
 
     std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
     for (auto& m : physicalMetadatas) {
-        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
+        monitoredPhysicalMetadata.emplace(m.mPhysicalCameraId,
                 CameraMetadata(m.mPhysicalCameraMetadata));
     }
     states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
@@ -528,7 +529,7 @@
 
 // Erase the subset of physicalCameraIds that contains id
 bool erasePhysicalCameraIdSet(
-        std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
+        std::set<std::set<std::string>>& physicalCameraIds, const std::string& id) {
     bool found = false;
     for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
         if (iter->count(id) == 1) {
@@ -712,7 +713,7 @@
                 return;
             }
             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
-                String8 physicalId(result->physcam_ids[i]);
+                const std::string physicalId = result->physcam_ids[i];
                 bool validPhysicalCameraMetadata =
                         erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
                 if (!validPhysicalCameraMetadata) {
@@ -768,7 +769,7 @@
             for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                 CameraMetadata physicalMetadata;
                 physicalMetadata.append(result->physcam_metadata[i]);
-                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
+                request.physicalMetadatas.push_back({result->physcam_ids[i],
                         physicalMetadata});
             }
             if (shutterTimestamp == 0) {
@@ -996,7 +997,7 @@
             }
             if (r.hasCallback) {
                 ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
-                    states.cameraId.string(), __FUNCTION__,
+                    states.cameraId.c_str(), __FUNCTION__,
                     msg.frame_number, r.resultExtras.requestId, msg.timestamp);
                 // Call listener, if any
                 if (states.listener != nullptr) {
@@ -1053,15 +1054,15 @@
             hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;
 
     int streamId = 0;
-    String16 physicalCameraId;
+    std::string physicalCameraId;
     if (msg.error_stream != nullptr) {
         Camera3Stream *stream =
                 Camera3Stream::cast(msg.error_stream);
         streamId = stream->getId();
-        physicalCameraId = String16(stream->physicalCameraId());
+        physicalCameraId = stream->physicalCameraId();
     }
     ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
-            states.cameraId.string(), __FUNCTION__, msg.frame_number,
+            states.cameraId.c_str(), __FUNCTION__, msg.frame_number,
             streamId, msg.error_code);
 
     CaptureResultExtras resultExtras;
@@ -1083,13 +1084,12 @@
                     if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
                             errorCode) {
                         if (physicalCameraId.size() > 0) {
-                            String8 cameraId(physicalCameraId);
                             bool validPhysicalCameraId =
-                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
+                                    erasePhysicalCameraIdSet(r.physicalCameraIds, physicalCameraId);
                             if (!validPhysicalCameraId) {
                                 ALOGE("%s: Reported result failure for physical camera device: %s "
                                         " which is not part of the respective request!",
-                                        __FUNCTION__, cameraId.string());
+                                        __FUNCTION__, physicalCameraId.c_str());
                                 break;
                             }
                             resultExtras.errorPhysicalCameraId = physicalCameraId;
@@ -1114,7 +1114,7 @@
                 } else {
                     resultExtras.frameNumber = msg.frame_number;
                     ALOGE("Camera %s: %s: cannot find in-flight request on "
-                            "frame %" PRId64 " error", states.cameraId.string(), __FUNCTION__,
+                            "frame %" PRId64 " error", states.cameraId.c_str(), __FUNCTION__,
                             resultExtras.frameNumber);
                 }
             }
@@ -1123,7 +1123,7 @@
                 states.listener->notifyError(errorCode, resultExtras);
             } else {
                 ALOGE("Camera %s: %s: no listener available",
-                        states.cameraId.string(), __FUNCTION__);
+                        states.cameraId.c_str(), __FUNCTION__);
             }
             break;
         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index d5328c5..134c037 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -71,7 +71,7 @@
     // Camera3Device/Camera3OfflineSession internal states used in notify/processCaptureResult
     // callbacks
     struct CaptureOutputStates {
-        const String8& cameraId;
+        const std::string& cameraId;
         std::mutex& inflightLock;
         int64_t& lastCompletedRegularFrameNumber;
         int64_t& lastCompletedReprocessFrameNumber;
@@ -115,7 +115,7 @@
     void notify(CaptureOutputStates& states, const camera_notify_msg *msg);
 
     struct RequestBufferStates {
-        const String8& cameraId;
+        const std::string& cameraId;
         std::mutex& reqBufferLock; // lock to serialize request buffer calls
         const bool useHalBufManager;
         StreamSet& outputStreams;
@@ -126,7 +126,7 @@
     };
 
     struct ReturnBufferStates {
-        const String8& cameraId;
+        const std::string& cameraId;
         const bool useHalBufManager;
         StreamSet& outputStreams;
         SessionStatsBuilder& sessionStatsBuilder;
@@ -134,7 +134,7 @@
     };
 
     struct FlushInflightReqStates {
-        const String8& cameraId;
+        const std::string& cameraId;
         std::mutex& inflightLock;
         InFlightRequestMap& inflightMap; // end of inflightLock scope
         const bool useHalBufManager;
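
These states structs now carry the camera id as a const std::string&, so the owning device's mId is referenced rather than copied and logging goes through c_str(). A self-contained sketch of that binding, with names chosen for illustration only:

#include <cstdio>
#include <string>

struct OutputStatesSketch {
    const std::string& cameraId;  // refers to the device's mId for the callback's duration
};

int main() {
    std::string mId = "0";
    OutputStatesSketch states{ mId };                     // aggregate init binds the reference
    std::printf("Camera %s\n", states.cameraId.c_str());  // stand-in for the ALOG* calls
    return 0;
}
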
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index 2e05dda..3ac666b 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -296,7 +296,7 @@
         const VecStreamBufferType& buffers) {
     if (!states.useHalBufManager) {
         ALOGE("%s: Camera %s does not support HAL buffer managerment",
-                __FUNCTION__, states.cameraId.string());
+                __FUNCTION__, states.cameraId.c_str());
         return;
     }
 
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index f3a7359..1191f05 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -31,7 +31,7 @@
         uint32_t width, uint32_t height, int format,
         uint64_t consumerUsage, android_dataspace dataSpace,
         camera_stream_rotation_t rotation,
-        nsecs_t timestampOffset, const String8& physicalCameraId,
+        nsecs_t timestampOffset, const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
         int setId, bool useHalBufManager, int64_t dynamicProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 1102ecb..c2ff20e 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -37,7 +37,7 @@
             uint32_t width, uint32_t height, int format,
             uint64_t consumerUsage, android_dataspace dataSpace,
             camera_stream_rotation_t rotation, nsecs_t timestampOffset,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
             int setId = CAMERA3_STREAM_SET_ID_INVALID,
             bool useHalBufManager = false,
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4395455..23afa6e 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -22,6 +22,7 @@
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include <camera/StringUtils.h>
 #include "device3/Camera3Stream.h"
 #include "device3/StatusTracker.h"
 #include "utils/TraceHFR.h"
@@ -52,7 +53,7 @@
         camera_stream_type type,
         uint32_t width, uint32_t height, size_t maxSize, int format,
         android_dataspace dataSpace, camera_stream_rotation_t rotation,
-        const String8& physicalCameraId,
+        const std::string& physicalCameraId,
         const std::unordered_set<int32_t> &sensorPixelModesUsed,
         int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
         int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
@@ -60,7 +61,7 @@
     camera_stream(),
     mId(id),
     mSetId(setId),
-    mName(String8::format("Camera3Stream[%d]", id)),
+    mName(fmt::sprintf("Camera3Stream[%d]", id)),
     mMaxSize(maxSize),
     mState(STATE_CONSTRUCTED),
     mStatusId(StatusTracker::NO_STATUS_ID),
@@ -92,7 +93,7 @@
     camera_stream::data_space = dataSpace;
     camera_stream::rotation = rotation;
     camera_stream::max_buffers = 0;
-    camera_stream::physical_camera_id = mPhysicalCameraId.string();
+    camera_stream::physical_camera_id = mPhysicalCameraId;
     camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
     camera_stream::dynamic_range_profile = dynamicRangeProfile;
     camera_stream::use_case = streamUseCase;
@@ -177,7 +178,7 @@
     return mOriginalDataSpace;
 }
 
-const String8& Camera3Stream::physicalCameraId() const {
+const std::string& Camera3Stream::physicalCameraId() const {
     return mPhysicalCameraId;
 }
 
@@ -376,7 +377,7 @@
     sp<StatusTracker> statusTracker = mStatusTracker.promote();
     if (statusTracker != 0 && mStatusId == StatusTracker::NO_STATUS_ID) {
         std::string name = std::string("Stream ") + std::to_string(mId);
-        mStatusId = statusTracker->addComponent(name.c_str());
+        mStatusId = statusTracker->addComponent(name);
     }
 
     // Check if the stream configuration is unchanged, and skip reallocation if
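
The stream name is now a std::string built with fmt::sprintf (from the {fmt} library) instead of String8::format, as in the constructor's initializer list above. A minimal sketch of the same pattern:

#include <fmt/printf.h>
#include <string>

// Same formatting used for mName in the Camera3Stream constructor.
static std::string makeStreamName(int id) {
    return fmt::sprintf("Camera3Stream[%d]", id);
}
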
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index f32053b..2bfaaab 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -19,7 +19,6 @@
 
 #include <gui/Surface.h>
 #include <utils/RefBase.h>
-#include <utils/String8.h>
 #include <utils/String16.h>
 #include <utils/List.h>
 
@@ -163,25 +162,25 @@
     /**
      * Get the stream's dimensions and format
      */
-    uint32_t          getWidth() const;
-    uint32_t          getHeight() const;
-    int               getFormat() const;
-    android_dataspace getDataSpace() const;
-    int32_t           getColorSpace() const;
-    uint64_t          getUsage() const;
-    void              setUsage(uint64_t usage);
-    void              setFormatOverride(bool formatOverriden);
-    bool              isFormatOverridden() const;
-    int               getOriginalFormat() const;
-    int64_t           getDynamicRangeProfile() const;
-    void              setDataSpaceOverride(bool dataSpaceOverriden);
-    bool              isDataSpaceOverridden() const;
-    android_dataspace getOriginalDataSpace() const;
-    int               getMaxHalBuffers() const;
-    const String8&    physicalCameraId() const;
-    int64_t           getStreamUseCase() const;
-    int               getTimestampBase() const;
-    bool              isDeviceTimeBaseRealtime() const;
+    uint32_t           getWidth() const;
+    uint32_t           getHeight() const;
+    int                getFormat() const;
+    android_dataspace  getDataSpace() const;
+    int32_t            getColorSpace() const;
+    uint64_t           getUsage() const;
+    void               setUsage(uint64_t usage);
+    void               setFormatOverride(bool formatOverridden);
+    bool               isFormatOverridden() const;
+    int                getOriginalFormat() const;
+    int64_t            getDynamicRangeProfile() const;
+    void               setDataSpaceOverride(bool dataSpaceOverridden);
+    bool               isDataSpaceOverridden() const;
+    android_dataspace  getOriginalDataSpace() const;
+    int                getMaxHalBuffers() const;
+    const std::string& physicalCameraId() const;
+    int64_t            getStreamUseCase() const;
+    int                getTimestampBase() const;
+    bool               isDeviceTimeBaseRealtime() const;
 
     void              setOfflineProcessingSupport(bool) override;
     bool              getOfflineProcessingSupport() const override;
@@ -487,7 +486,7 @@
      */
     const int mSetId;
 
-    const String8 mName;
+    const std::string mName;
     // Zero for formats with fixed buffer size for given dimensions.
     const size_t mMaxSize;
 
@@ -507,7 +506,7 @@
     Camera3Stream(int id, camera_stream_type type,
             uint32_t width, uint32_t height, size_t maxSize, int format,
             android_dataspace dataSpace, camera_stream_rotation_t rotation,
-            const String8& physicalCameraId,
+            const std::string& physicalCameraId,
             const std::unordered_set<int32_t> &sensorPixelModesUsed,
             int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
             int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
@@ -634,7 +633,7 @@
     bool mDataSpaceOverridden;
     const android_dataspace mOriginalDataSpace;
 
-    String8 mPhysicalCameraId;
+    std::string mPhysicalCameraId;
     nsecs_t mLastTimestamp;
 
     bool mIsMultiResolution = false;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 823be2e..7fa6273 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -62,7 +62,7 @@
     uint32_t max_buffers;
     android_dataspace_t data_space;
     camera_stream_rotation_t rotation;
-    const char* physical_camera_id;
+    std::string physical_camera_id;
 
     std::unordered_set<int32_t> sensor_pixel_modes_used;
     int64_t dynamic_range_profile;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index fd23958..8175eb5 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -25,6 +25,7 @@
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/BufferQueue.h>
 #include <gui/Surface.h>
+#include <camera/StringUtils.h>
 
 #include <ui/GraphicBuffer.h>
 
@@ -92,7 +93,7 @@
     if (mBufferItemConsumer == nullptr) {
         return NO_MEMORY;
     }
-    mConsumer->setConsumerName(mConsumerName);
+    mConsumer->setConsumerName(toString8(mConsumerName));
 
     *consumer = new Surface(mProducer);
     if (*consumer == nullptr) {
@@ -408,9 +409,9 @@
     return res;
 }
 
-String8 Camera3StreamSplitter::getUniqueConsumerName() {
+std::string Camera3StreamSplitter::getUniqueConsumerName() {
     static volatile int32_t counter = 0;
-    return String8::format("Camera3StreamSplitter-%d", android_atomic_inc(&counter));
+    return fmt::sprintf("Camera3StreamSplitter-%d", android_atomic_inc(&counter));
 }
 
 status_t Camera3StreamSplitter::notifyBufferReleased(const sp<GraphicBuffer>& buffer) {
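
The splitter keeps mConsumerName as std::string and converts only at the String8-based BufferQueue boundary (via the toString8 helper from camera/StringUtils.h). A sketch of the unique-name pattern above, with std::atomic standing in for android_atomic_inc purely for a self-contained example:

#include <fmt/printf.h>
#include <atomic>
#include <cstdint>
#include <string>

static std::string uniqueConsumerNameSketch() {
    static std::atomic<int32_t> counter{0};  // stand-in for android_atomic_inc
    return fmt::sprintf("Camera3StreamSplitter-%d", counter.fetch_add(1));
}
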
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 0f728a0..639353c 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -30,10 +30,10 @@
 #include <utils/StrongPointer.h>
 #include <utils/Timers.h>
 
-#define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.string(), ##__VA_ARGS__)
-#define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.string(), ##__VA_ARGS__)
-#define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.string(), ##__VA_ARGS__)
-#define SP_LOGE(x, ...) ALOGE("[%s] " x, mConsumerName.string(), ##__VA_ARGS__)
+#define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGE(x, ...) ALOGE("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
 
 namespace android {
 
@@ -222,7 +222,7 @@
             const BufferItem& bufferItem, size_t surfaceId);
 
     // Get unique name for the buffer queue consumer
-    String8 getUniqueConsumerName();
+    std::string getUniqueConsumerName();
 
     // Helper function to get the BufferQueue slot where a particular buffer is attached to.
     int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
@@ -289,7 +289,7 @@
     // Currently acquired input buffers
     size_t mAcquiredInputBuffers;
 
-    String8 mConsumerName;
+    std::string mConsumerName;
 
     const bool mUseHalBufManager;
 };
diff --git a/services/camera/libcameraservice/device3/InFlightRequest.h b/services/camera/libcameraservice/device3/InFlightRequest.h
index 870825a..665ac73 100644
--- a/services/camera/libcameraservice/device3/InFlightRequest.h
+++ b/services/camera/libcameraservice/device3/InFlightRequest.h
@@ -21,7 +21,6 @@
 
 #include <camera/CaptureResult.h>
 #include <camera/CameraMetadata.h>
-#include <utils/String8.h>
 #include <utils/Timers.h>
 
 #include "common/CameraDeviceBase.h"
@@ -168,7 +167,7 @@
     // For request on a physical camera stream, the inside set contains one Id
     // For request on a stream group containing physical camera streams, the
     // inside set contains all stream Ids in the group.
-    std::set<std::set<String8>> physicalCameraIds;
+    std::set<std::set<std::string>> physicalCameraIds;
 
     // Map of physicalCameraId <-> Metadata
     std::vector<PhysicalCaptureResultInfo> physicalMetadatas;
@@ -224,7 +223,7 @@
 
     InFlightRequest(int numBuffers, CaptureResultExtras extras, bool hasInput,
             bool hasAppCallback, nsecs_t minDuration, nsecs_t maxDuration, bool fixedFps,
-            const std::set<std::set<String8>>& physicalCameraIdSet, bool isStillCapture,
+            const std::set<std::set<std::string>>& physicalCameraIdSet, bool isStillCapture,
             bool isZslCapture, bool rotateAndCropAuto, bool autoframingAuto,
             const std::set<std::string>& idsWithZoom, nsecs_t requestNs,
             const SurfaceMap& outSurfaces = SurfaceMap{}) :
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index aa941b0..af48dd6 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -26,10 +26,10 @@
 #endif
 
 // Convenience macro for transient errors
-#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
             ##__VA_ARGS__)
 
-#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+#define CLOGW(fmt, ...) ALOGW("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
             ##__VA_ARGS__)
 
 // Convenience macros for transitioning to the error state
@@ -53,6 +53,7 @@
 #include <aidlcommonsupport/NativeHandle.h>
 #include <android/binder_ibinder_platform.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <camera/StringUtils.h>
 
 #include "utils/CameraTraces.h"
 #include "mediautils/SchedulingPolicyService.h"
@@ -164,7 +165,7 @@
 
 AidlCamera3Device::AidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient) :
         Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
         legacyClient) {
@@ -172,12 +173,12 @@
 }
 
 status_t AidlCamera3Device::initialize(sp<CameraProviderManager> manager,
-        const String8& monitorTags) {
+        const std::string& monitorTags) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    ALOGV("%s: Initializing AIDL device for camera %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Initializing AIDL device for camera %s", __FUNCTION__, mId.c_str());
     if (mStatus != STATUS_UNINITIALIZED) {
         CLOGE("Already initialized!");
         return INVALID_OPERATION;
@@ -186,7 +187,7 @@
 
     std::shared_ptr<camera::device::ICameraDeviceSession> session;
     ATRACE_BEGIN("CameraHal::openSession");
-    status_t res = manager->openAidlSession(mId.string(), mCallbacks,
+    status_t res = manager->openAidlSession(mId, mCallbacks,
             /*out*/ &session);
     ATRACE_END();
     if (res != OK) {
@@ -197,18 +198,18 @@
       SET_ERR("Session iface returned is null");
       return INVALID_OPERATION;
     }
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+    res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
             mOverrideToPortrait);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
         return res;
     }
-    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
-    mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId.string());
+    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId);
+    mIsCompositeJpegRDisabled = manager->isCompositeJpegRDisabled(mId);
 
     std::vector<std::string> physicalCameraIds;
-    bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
+    bool isLogical = manager->isLogicalCamera(mId, &physicalCameraIds);
     if (isLogical) {
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
@@ -299,10 +300,10 @@
     mInterface = new AidlHalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
 
     std::string providerType;
-    mVendorTagId = manager->getProviderTagIdLocked(mId.string());
+    mVendorTagId = manager->getProviderTagIdLocked(mId);
     mTagMonitor.initialize(mVendorTagId);
-    if (!monitorTags.isEmpty()) {
-        mTagMonitor.parseTagsToMonitor(String8(monitorTags));
+    if (!monitorTags.empty()) {
+        mTagMonitor.parseTagsToMonitor(monitorTags);
     }
 
     for (size_t i = 0; i < capabilities.count; i++) {
@@ -922,7 +923,7 @@
         dst.colorSpace = src->color_space;
 
         dst.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
+        if (!src->physical_camera_id.empty()) {
             dst.physicalCameraId = src->physical_camera_id;
         }
         dst.groupId = cam3stream->getHalStreamGroupId();
@@ -1100,7 +1101,7 @@
             mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
                     cam3stream->getOriginalDataSpace() : src->data_space);
         dst.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
+        if (!src->physical_camera_id.empty()) {
             dst.physicalCameraId = src->physical_camera_id;
         }
         dst.groupId = cam3stream->getHalStreamGroupId();
@@ -1456,7 +1457,7 @@
 }
 
 status_t AidlCamera3Device::AidlCamera3DeviceInjectionMethods::injectionInitialize(
-        const String8& injectedCamId, sp<CameraProviderManager> manager,
+        const std::string& injectedCamId, sp<CameraProviderManager> manager,
         const std::shared_ptr<camera::device::ICameraDeviceCallback>&callback) {
     ATRACE_CALL();
     Mutex::Autolock lock(mInjectionLock);
@@ -1480,7 +1481,7 @@
     mInjectedCamId = injectedCamId;
     std::shared_ptr<camera::device::ICameraInjectionSession> injectionSession;
     ATRACE_BEGIN("Injection CameraHal::openSession");
-    status_t res = manager->openAidlInjectionSession(injectedCamId.string(), callback,
+    status_t res = manager->openAidlInjectionSession(injectedCamId, callback,
                                           /*out*/ &injectionSession);
     ATRACE_END();
     if (res != OK) {
@@ -1579,7 +1580,7 @@
     return OK;
 }
 
-status_t AidlCamera3Device::injectionCameraInitialize(const String8 &injectedCamId,
+status_t AidlCamera3Device::injectionCameraInitialize(const std::string &injectedCamId,
             sp<CameraProviderManager> manager) {
         return (static_cast<AidlCamera3DeviceInjectionMethods *>
                     (mInjectionMethods.get()))->injectionInitialize(injectedCamId, manager,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index 99a308b..e0be367 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -41,7 +41,7 @@
     friend class AidlCameraDeviceCallbacks;
     explicit AidlCamera3Device(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-            const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -71,7 +71,7 @@
     virtual status_t switchToOffline(const std::vector<int32_t>& /*streamsToKeep*/,
             /*out*/ sp<CameraOfflineSessionBase>* /*session*/) override;
 
-    status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) override;
+    status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags) override;
     class AidlHalInterface : public Camera3Device::HalInterface {
      public:
         AidlHalInterface(std::shared_ptr<
@@ -196,7 +196,7 @@
      public:
         // Initialize the injection camera and generate a HAL interface.
         status_t injectionInitialize(
-                const String8& injectedCamId, sp<CameraProviderManager> manager,
+                const std::string& injectedCamId, sp<CameraProviderManager> manager,
                 const std::shared_ptr<
                     aidl::android::hardware::camera::device::ICameraDeviceCallback>&
                     callback);
@@ -257,7 +257,7 @@
     };
 
   private:
-    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+    virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) override;
 
     virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> parent,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index 3c3db97..01c4e88 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -31,6 +31,7 @@
 
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <android/binder_ibinder_platform.h>
+#include <camera/StringUtils.h>
 
 #include "device3/aidl/AidlCamera3OfflineSession.h"
 #include "device3/Camera3OutputStream.h"
@@ -47,7 +48,7 @@
 
 AidlCamera3OfflineSession::~AidlCamera3OfflineSession() {
     ATRACE_CALL();
-    ALOGV("%s: Tearing down aidl offline session for camera id %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Tearing down aidl offline session for camera id %s", __FUNCTION__, mId.c_str());
     Camera3OfflineSession::disconnectImpl();
 }
 
@@ -113,7 +114,7 @@
 
     std::string activePhysicalId(""); // Unused
     AidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
@@ -160,7 +161,7 @@
 
     std::string activePhysicalId(""); // Unused
     AidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
index b31ffb7..33b638c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.h
@@ -20,7 +20,6 @@
 #include <memory>
 #include <mutex>
 
-#include <utils/String8.h>
 #include <utils/String16.h>
 
 #include "AidlCamera3OutputUtils.h"
@@ -106,7 +105,7 @@
     };
 
     // initialize by Camera3Device.
-    explicit AidlCamera3OfflineSession(const String8& id,
+    explicit AidlCamera3OfflineSession(const std::string& id,
             const sp<camera3::Camera3Stream>& inputStream,
             const camera3::StreamSet& offlineStreamSet,
             camera3::BufferRecords&& bufferRecords,
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index b2accc1..74d4230 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -145,7 +145,7 @@
     outBuffers->clear();
     if (!states.useHalBufManager) {
         ALOGE("%s: Camera %s does not support HAL buffer management",
-                __FUNCTION__, states.cameraId.string());
+                __FUNCTION__, states.cameraId.c_str());
         *status = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
         return;
     }
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 0d44dd5..06af5ff 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -26,7 +26,7 @@
 #endif
 
 // Convenience macro for transient errors
-#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.c_str(), __FUNCTION__, \
             ##__VA_ARGS__)
 
 // Convenience macros for transitioning to the error state
@@ -46,6 +46,7 @@
 #include <utils/Trace.h>
 #include <utils/Timers.h>
 #include <cutils/properties.h>
+#include <camera/StringUtils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
@@ -140,12 +141,12 @@
 }
 
 status_t HidlCamera3Device::initialize(sp<CameraProviderManager> manager,
-        const String8& monitorTags) {
+        const std::string& monitorTags) {
     ATRACE_CALL();
     Mutex::Autolock il(mInterfaceLock);
     Mutex::Autolock l(mLock);
 
-    ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.c_str());
     if (mStatus != STATUS_UNINITIALIZED) {
         CLOGE("Already initialized!");
         return INVALID_OPERATION;
@@ -154,7 +155,7 @@
 
     sp<ICameraDeviceSession> session;
     ATRACE_BEGIN("CameraHal::openSession");
-    status_t res = manager->openHidlSession(mId.string(), this,
+    status_t res = manager->openHidlSession(mId, this,
             /*out*/ &session);
     ATRACE_END();
     if (res != OK) {
@@ -162,17 +163,17 @@
         return res;
     }
 
-    res = manager->getCameraCharacteristics(mId.string(), mOverrideForPerfClass, &mDeviceInfo,
+    res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
             /*overrideToPortrait*/false);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
         return res;
     }
-    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId.string());
+    mSupportNativeZoomRatio = manager->supportNativeZoomRatio(mId);
 
     std::vector<std::string> physicalCameraIds;
-    bool isLogical = manager->isLogicalCamera(mId.string(), &physicalCameraIds);
+    bool isLogical = manager->isLogicalCamera(mId, &physicalCameraIds);
     if (isLogical) {
         for (auto& physicalId : physicalCameraIds) {
             // Do not override characteristics for physical cameras
@@ -271,17 +272,17 @@
     mInterface = new HidlHalInterface(session, queue, mUseHalBufManager, mSupportOfflineProcessing);
 
     std::string providerType;
-    mVendorTagId = manager->getProviderTagIdLocked(mId.string());
+    mVendorTagId = manager->getProviderTagIdLocked(mId);
     mTagMonitor.initialize(mVendorTagId);
-    if (!monitorTags.isEmpty()) {
-        mTagMonitor.parseTagsToMonitor(String8(monitorTags));
+    if (!monitorTags.empty()) {
+        mTagMonitor.parseTagsToMonitor(monitorTags);
     }
 
     // Metadata tags need fixup for monochrome camera device version less
     // than 3.5.
     hardware::hidl_version maxVersion{0,0};
     IPCTransport transport = IPCTransport::HIDL;
-    res = manager->getHighestSupportedVersion(mId.string(), &maxVersion, &transport);
+    res = manager->getHighestSupportedVersion(mId, &maxVersion, &transport);
     if (res != OK) {
         ALOGE("%s: Error in getting camera device version id: %s (%d)",
                 __FUNCTION__, strerror(-res), res);
@@ -416,7 +417,7 @@
     }
 
     HidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mInFlightLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
@@ -464,7 +465,7 @@
     }
 
     HidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mInFlightLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mInFlightMap, mOutputLock,  mResultQueue, mResultSignal,
@@ -715,7 +716,7 @@
     return new HidlCamera3DeviceInjectionMethods(parent);
 }
 
-status_t HidlCamera3Device::injectionCameraInitialize(const String8 &injectedCamId,
+status_t HidlCamera3Device::injectionCameraInitialize(const std::string &injectedCamId,
             sp<CameraProviderManager> manager) {
         return (static_cast<HidlCamera3DeviceInjectionMethods *>(
                 mInjectionMethods.get()))->injectionInitialize(injectedCamId, manager, this);
@@ -941,7 +942,7 @@
         }
         dst3_4.v3_2 = dst3_2;
         dst3_4.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
+        if (!src->physical_camera_id.empty()) {
             dst3_4.physicalCameraId = src->physical_camera_id;
         }
         dst3_7.v3_4 = dst3_4;
@@ -1259,7 +1260,7 @@
         }
         dst3_4.v3_2 = dst3_2;
         dst3_4.bufferSize = bufferSizes[i];
-        if (src->physical_camera_id != nullptr) {
+        if (!src->physical_camera_id.empty()) {
             dst3_4.physicalCameraId = src->physical_camera_id;
         }
         dst3_7.v3_4 = dst3_4;
@@ -1739,7 +1740,7 @@
 }
 
 status_t HidlCamera3Device::HidlCamera3DeviceInjectionMethods::injectionInitialize(
-        const String8& injectedCamId, sp<CameraProviderManager> manager,
+        const std::string& injectedCamId, sp<CameraProviderManager> manager,
         const sp<android::hardware::camera::device::V3_2::ICameraDeviceCallback>&
                 callback) {
     ATRACE_CALL();
@@ -1759,7 +1760,7 @@
     mInjectedCamId = injectedCamId;
     sp<ICameraDeviceSession> session;
     ATRACE_BEGIN("Injection CameraHal::openSession");
-    status_t res = manager->openHidlSession(injectedCamId.string(), callback,
+    status_t res = manager->openHidlSession(injectedCamId, callback,
                                           /*out*/ &session);
     ATRACE_END();
     if (res != OK) {
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index 1e50844..2cfdf9d 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -31,8 +31,9 @@
             public Camera3Device {
   public:
 
-   explicit HidlCamera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
-        const String8& id, bool overrideForPerfClass, bool overrideToPortrait,
+    explicit HidlCamera3Device(
+        std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
+        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
         bool legacyClient = false) :
         Camera3Device(cameraServiceProxyWrapper, id, overrideForPerfClass, overrideToPortrait,
                 legacyClient) { }
@@ -59,7 +60,7 @@
     static uint64_t mapProducerToFrameworkUsage(
             hardware::camera::device::V3_2::BufferUsageFlags usage);
 
-    status_t initialize(sp<CameraProviderManager> manager, const String8& monitorTags) override;
+    status_t initialize(sp<CameraProviderManager> manager, const std::string& monitorTags) override;
 
     /**
      * Implementation of android::hardware::camera::device::V3_5::ICameraDeviceCallback
@@ -191,7 +192,7 @@
      public:
         // Initialize the injection camera and generate a HAL interface.
         status_t injectionInitialize(
-                const String8& injectedCamId, sp<CameraProviderManager> manager,
+                const std::string& injectedCamId, sp<CameraProviderManager> manager,
                 const sp<
                     android::hardware::camera::device::V3_2 ::ICameraDeviceCallback>&
                     callback);
@@ -217,7 +218,7 @@
     hardware::Return<void> notifyHelper(
             const hardware::hidl_vec<NotifyMsgType>& msgs);
 
-    virtual status_t injectionCameraInitialize(const String8 &injectCamId,
+    virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) override;
 
     virtual sp<RequestThread> createNewRequestThread(wp<Camera3Device> parent,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index 28b2b47..e328ef6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -23,6 +23,7 @@
 #include <utils/Trace.h>
 
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
+#include <camera/StringUtils.h>
 
 #include "device3/hidl/HidlCamera3OfflineSession.h"
 #include "device3/Camera3OutputStream.h"
@@ -38,7 +39,7 @@
 
 HidlCamera3OfflineSession::~HidlCamera3OfflineSession() {
     ATRACE_CALL();
-    ALOGV("%s: Tearing down hidl offline session for camera id %s", __FUNCTION__, mId.string());
+    ALOGV("%s: Tearing down hidl offline session for camera id %s", __FUNCTION__, mId.c_str());
     Camera3OfflineSession::disconnectImpl();
 }
 
@@ -94,7 +95,7 @@
 
     std::string activePhysicalId("");
     HidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
@@ -136,7 +137,7 @@
 
     std::string activePhysicalId("");
     HidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
@@ -173,7 +174,7 @@
 
     std::string activePhysicalId("");
     HidlCaptureOutputStates states {
-      {mId,
+      { mId,
         mOfflineReqsLock, mLastCompletedRegularFrameNumber,
         mLastCompletedReprocessFrameNumber, mLastCompletedZslFrameNumber,
         mOfflineReqs, mOutputLock, mResultQueue, mResultSignal,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
index d22a447..df306b2 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.h
@@ -20,7 +20,6 @@
 #include <memory>
 #include <mutex>
 
-#include <utils/String8.h>
 #include <utils/String16.h>
 
 #include <android/hardware/camera/device/3.6/ICameraOfflineSession.h>
@@ -53,7 +52,7 @@
   public:
 
     // initialize by Camera3Device.
-    explicit HidlCamera3OfflineSession(const String8& id,
+    explicit HidlCamera3OfflineSession(const std::string& id,
             const sp<camera3::Camera3Stream>& inputStream,
             const camera3::StreamSet& offlineStreamSet,
             camera3::BufferRecords&& bufferRecords,
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index ee018c3..cd4a92c 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -23,6 +23,7 @@
 #include <utils/Log.h>
 
 #include <gui/RingBufferConsumer.h>
+#include <camera/StringUtils.h>
 
 #define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.string(), ##__VA_ARGS__)
 #define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.string(), ##__VA_ARGS__)
@@ -53,10 +54,10 @@
 RingBufferConsumer::~RingBufferConsumer() {
 }
 
-void RingBufferConsumer::setName(const String8& name) {
+void RingBufferConsumer::setName(const std::string& name) {
     Mutex::Autolock _l(mMutex);
-    mName = name;
-    mConsumer->setConsumerName(name);
+    mName = toString8(name);
+    mConsumer->setConsumerName(mName);
 }
 
 sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index b737469..2e523d1 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -27,8 +27,6 @@
 
 namespace android {
 
-class String8;
-
 /**
  * The RingBufferConsumer maintains a ring buffer of BufferItem objects,
  * (which are 'acquired' as long as they are part of the ring buffer, and
@@ -67,7 +65,7 @@
 
     // set the name of the RingBufferConsumer that will be used to identify it in
     // log messages.
-    void setName(const String8& name);
+    void setName(const std::string& name);
 
     // setDefaultBufferSize is used to set the size of buffers returned by
     // requestBuffers when a width and height of zero is requested.
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index add9121..2b81224 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -16,6 +16,7 @@
 
 #include <hidl/AidlCameraServiceListener.h>
 #include <hidl/Utils.h>
+#include <camera/StringUtils.h>
 
 namespace android {
 namespace frameworks {
@@ -28,11 +29,11 @@
 typedef frameworks::cameraservice::service::V2_1::ICameraServiceListener HCameraServiceListener2_1;
 
 binder::Status H2BCameraServiceListener::onStatusChanged(
-    int32_t status, const ::android::String16& cameraId) {
+    int32_t status, const std::string& cameraId) {
   HCameraDeviceStatus hCameraDeviceStatus = convertToHidlCameraDeviceStatus(status);
   CameraStatusAndId cameraStatusAndId;
   cameraStatusAndId.deviceStatus = hCameraDeviceStatus;
-  cameraStatusAndId.cameraId = String8(cameraId).string();
+  cameraStatusAndId.cameraId = cameraId;
   auto ret = mBase->onStatusChanged(cameraStatusAndId);
   if (!ret.isOk()) {
       ALOGE("%s OnStatusChanged callback failed due to %s",__FUNCTION__,
@@ -42,8 +43,8 @@
 }
 
 binder::Status H2BCameraServiceListener::onPhysicalCameraStatusChanged(
-    int32_t status, const ::android::String16& cameraId,
-    const ::android::String16& physicalCameraId) {
+    int32_t status, const std::string& cameraId,
+    const std::string& physicalCameraId) {
   auto cast2_1 = HCameraServiceListener2_1::castFrom(mBase);
   sp<HCameraServiceListener2_1> interface2_1 = nullptr;
   if (cast2_1.isOk()) {
@@ -52,8 +53,8 @@
       HCameraDeviceStatus hCameraDeviceStatus = convertToHidlCameraDeviceStatus(status);
       V2_1::PhysicalCameraStatusAndId cameraStatusAndId;
       cameraStatusAndId.deviceStatus = hCameraDeviceStatus;
-      cameraStatusAndId.cameraId = String8(cameraId).string();
-      cameraStatusAndId.physicalCameraId = String8(physicalCameraId).string();
+      cameraStatusAndId.cameraId = cameraId;
+      cameraStatusAndId.physicalCameraId = physicalCameraId;
       auto ret = interface2_1->onPhysicalCameraStatusChanged(cameraStatusAndId);
       if (!ret.isOk()) {
         ALOGE("%s OnPhysicalCameraStatusChanged callback failed due to %s",__FUNCTION__,
@@ -65,13 +66,13 @@
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStatusChanged(
-    int32_t, const ::android::String16&) {
+    int32_t, const std::string&) {
   // We don't implement onTorchStatusChanged
   return binder::Status::ok();
 }
 
 ::android::binder::Status H2BCameraServiceListener::onTorchStrengthLevelChanged(
-    const ::android::String16&, int32_t) {
+    const std::string&, int32_t) {
   return binder::Status::ok();
 }
 
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
index 7ef413f..91a4c16 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.h
@@ -47,25 +47,25 @@
     ~H2BCameraServiceListener() { }
 
     virtual ::android::binder::Status onStatusChanged(int32_t status,
-            const ::android::String16& cameraId) override;
+            const std::string& cameraId) override;
     virtual ::android::binder::Status onPhysicalCameraStatusChanged(int32_t status,
-            const ::android::String16& cameraId,
-            const ::android::String16& physicalCameraId) override;
+            const std::string& cameraId,
+            const std::string& physicalCameraId) override;
 
     virtual ::android::binder::Status onTorchStatusChanged(
-            int32_t status, const ::android::String16& cameraId) override;
+            int32_t status, const std::string& cameraId) override;
     virtual ::android::binder::Status onTorchStrengthLevelChanged(
-            const ::android::String16& cameraId, int32_t newStrengthLevel) override;
+            const std::string& cameraId, int32_t newStrengthLevel) override;
     virtual binder::Status onCameraAccessPrioritiesChanged() {
         // TODO: no implementation yet.
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraOpened(const ::android::String16& /*cameraId*/,
-            const ::android::String16& /*clientPackageId*/) {
+    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageId*/) {
         // empty implementation
         return binder::Status::ok();
     }
-    virtual binder::Status onCameraClosed(const ::android::String16& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
         // empty implementation
         return binder::Status::ok();
     }
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 0f7f127..59fc1cd 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -112,7 +112,7 @@
         physicalCameraSettings->emplace_back();
         CaptureRequest::PhysicalCameraSettings &physicalCameraSetting =
             physicalCameraSettings->back();
-        physicalCameraSetting.id = e.id.c_str();
+        physicalCameraSetting.id = e.id;
 
         // Read the settings either from the fmq or straightaway from the
         // request. We don't need any synchronization, since submitRequestList
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index d6910fe..4f12933 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -65,7 +65,7 @@
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
     binder::Status serviceRet =
-        mAidlICameraService->getCameraCharacteristics(String16(cameraId.c_str()),
+        mAidlICameraService->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
                 &cameraMetadata);
     HCameraMetadata hidlMetadata;
@@ -116,7 +116,7 @@
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, String16(cameraId.c_str()), String16(""), {},
+            callbacks, cameraId, std::string(), {},
             hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
             /*out*/&deviceRemote);
@@ -242,7 +242,7 @@
             [this](const hardware::CameraStatus& s) {
                 bool supportsHAL3 = false;
                 binder::Status sRet =
-                            mAidlICameraService->supportsCameraApi(String16(s.cameraId),
+                            mAidlICameraService->supportsCameraApi(s.cameraId,
                                     hardware::ICameraService::API_VERSION_2, &supportsHAL3);
                 return !sRet.isOk() || !supportsHAL3;
             }), cameraStatusAndIds->end());
diff --git a/services/camera/libcameraservice/hidl/Utils.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
index 2a24a23..b5dddf7 100644
--- a/services/camera/libcameraservice/hidl/Utils.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -18,6 +18,7 @@
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <cutils/native_handle.h>
 #include <mediautils/AImageReaderUtils.h>
+#include <camera/StringUtils.h>
 
 namespace android {
 namespace hardware {
@@ -89,9 +90,9 @@
     for (auto &handle : windowHandles) {
         iGBPs.push_back(new H2BGraphicBufferProducer(AImageReader_getHGBPFromHandle(handle)));
     }
-    String16 physicalCameraId16(hOutputConfiguration.physicalCameraId.c_str());
     hardware::camera2::params::OutputConfiguration outputConfiguration(
-        iGBPs, convertFromHidl(hOutputConfiguration.rotation), physicalCameraId16,
+        iGBPs, convertFromHidl(hOutputConfiguration.rotation),
+        hOutputConfiguration.physicalCameraId,
         hOutputConfiguration.windowGroupId, OutputConfiguration::SURFACE_TYPE_UNKNOWN, 0, 0,
         (windowHandles.size() > 1));
     return outputConfiguration;
@@ -157,8 +158,8 @@
     hCaptureResultExtras.frameNumber = captureResultExtras.frameNumber;
     hCaptureResultExtras.partialResultCount = captureResultExtras.partialResultCount;
     hCaptureResultExtras.errorStreamId = captureResultExtras.errorStreamId;
-    hCaptureResultExtras.errorPhysicalCameraId = hidl_string(String8(
-            captureResultExtras.errorPhysicalCameraId).string());
+    hCaptureResultExtras.errorPhysicalCameraId = hidl_string(
+            captureResultExtras.errorPhysicalCameraId.c_str());
     return hCaptureResultExtras;
 }
 
@@ -191,7 +192,7 @@
     size_t i = 0;
     for (auto &statusAndId : src) {
         auto &a = (*dst)[i++];
-        a.cameraId = statusAndId.cameraId.c_str();
+        a.cameraId = statusAndId.cameraId;
         a.deviceStatus = convertToHidlCameraDeviceStatus(statusAndId.status);
     }
     return;
@@ -203,12 +204,12 @@
     size_t i = 0;
     for (const auto &statusAndId : src) {
         auto &a = (*dst)[i++];
-        a.v2_0.cameraId = statusAndId.cameraId.c_str();
+        a.v2_0.cameraId = statusAndId.cameraId;
         a.v2_0.deviceStatus = convertToHidlCameraDeviceStatus(statusAndId.status);
         size_t numUnvailPhysicalCameras = statusAndId.unavailablePhysicalIds.size();
         a.unavailPhysicalCameraIds.resize(numUnvailPhysicalCameras);
         for (size_t j = 0; j < numUnvailPhysicalCameras; j++) {
-            a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j].c_str();
+            a.unavailPhysicalCameraIds[j] = statusAndId.unavailablePhysicalIds[j];
         }
     }
     return;
@@ -265,7 +266,7 @@
     std::shared_ptr<CaptureResultMetadataQueue> &captureResultMetadataQueue) {
     HPhysicalCaptureResultInfo hPhysicalCaptureResultInfo;
     hPhysicalCaptureResultInfo.physicalCameraId =
-        String8(physicalCaptureResultInfo.mPhysicalCameraId).string();
+        toString8(physicalCaptureResultInfo.mPhysicalCameraId);
     const camera_metadata_t *rawMetadata =
         physicalCaptureResultInfo.mPhysicalCameraMetadata.getAndLock();
     // Try using fmq at first.
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index b397573..854c342 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -219,13 +219,13 @@
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
-    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    std::string cameraIdStr = std::to_string(cameraId);
     bool isSupported = false;
     mCameraService->supportsCameraApi(
         cameraIdStr, kCameraApiVersion[mFuzzedDataProvider->ConsumeBool()], &isSupported);
     mCameraService->isHiddenPhysicalCamera(cameraIdStr, &isSupported);
 
-    String16 parameters;
+    std::string parameters;
     mCameraService->getLegacyParameters(cameraId, &parameters);
 
     std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination> concurrentCameraIds;
@@ -318,7 +318,7 @@
 }
 
 void CameraFuzzer::invokeTorchAPIs(int32_t cameraId) {
-    String16 cameraIdStr = String16(String8::format("%d", cameraId));
+    std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
     mCameraService->setTorchMode(cameraIdStr, true, binder);
@@ -342,7 +342,7 @@
     ::android::binder::Status rc;
     sp<ICamera> cameraDevice;
 
-    rc = mCameraService->connect(this, cameraId, String16(),
+    rc = mCameraService->connect(this, cameraId, std::string(),
                                  android::CameraService::USE_CALLING_UID,
                                  android::CameraService::USE_CALLING_PID,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
@@ -484,17 +484,18 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t , const String16&) {
+    virtual binder::Status onStatusChanged(int32_t, const std::string&) {
         return binder::Status::ok();
     };
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
         // No op
         return binder::Status::ok();
     };
 
-    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
+            const std::string& /*cameraId*/) {
         return binder::Status::ok();
     };
 
@@ -503,18 +504,18 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
-            const String16& /*clientPackageName*/) {
+    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageName*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+    virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
             int32_t /*torchStrength*/) {
         // No op
         return binder::Status::ok();
@@ -579,7 +580,7 @@
     for (auto s : statuses) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        mCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+        mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
                 &device);
@@ -600,7 +601,7 @@
 
         sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
 
-        String16 noPhysicalId;
+        std::string noPhysicalId;
         size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
         OutputConfiguration output(gbProducer,
                 kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
@@ -626,9 +627,9 @@
                     kSensorPixelModes[mFuzzedDataProvider->ConsumeBool() ? 1 : 0];
             requestTemplate.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1);
             request.mPhysicalCameraSettings.clear();
-            request.mPhysicalCameraSettings.push_back({s.cameraId.string(), requestTemplate});
+            request.mPhysicalCameraSettings.push_back({s.cameraId, requestTemplate});
             device->submitRequest(request, /*streaming*/false, /*out*/&info);
-            ALOGV("%s : camera id %s submit request id %d",__FUNCTION__, s.cameraId.string(),
+            ALOGV("%s : camera id %s submit request id %d",__FUNCTION__, s.cameraId.c_str(),
                     info.mRequestId);
         }
         device->disconnect();
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 5e2a3fb..b035edd 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -31,11 +31,14 @@
         "libmedia_headers",
     ],
 
+    defaults: [
+        "libcameraservice_deps",
+    ],
+
     shared_libs: [
         "libbase",
         "libbinder",
         "libcutils",
-        "libcameraservice",
         "libhidlbase",
         "liblog",
         "libcamera_client",
@@ -45,11 +48,6 @@
         "libjpeg",
         "libexif",
         "android.hardware.camera.common@1.0",
-        "android.hardware.camera.provider@2.4",
-        "android.hardware.camera.provider@2.5",
-        "android.hardware.camera.provider@2.6",
-        "android.hardware.camera.provider@2.7",
-        "android.hardware.camera.provider-V2-ndk",
         "android.hardware.camera.device@1.0",
         "android.hardware.camera.device@3.2",
         "android.hardware.camera.device@3.4",
@@ -58,6 +56,12 @@
     ],
 
     static_libs: [
+        "android.hardware.camera.provider@2.4",
+        "android.hardware.camera.provider@2.5",
+        "android.hardware.camera.provider@2.6",
+        "android.hardware.camera.provider@2.7",
+        "android.hardware.camera.provider-V2-ndk",
+        "libcameraservice",
         "libgmock",
     ],
 
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 731eebf..db43a02 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -37,17 +37,18 @@
 public:
     virtual ~TestCameraServiceListener() {};
 
-    virtual binder::Status onStatusChanged(int32_t , const String16&) {
+    virtual binder::Status onStatusChanged(int32_t , const std::string&) {
         return binder::Status::ok();
     };
 
     virtual binder::Status onPhysicalCameraStatusChanged(int32_t /*status*/,
-            const String16& /*cameraId*/, const String16& /*physicalCameraId*/) {
+            const std::string& /*cameraId*/, const std::string& /*physicalCameraId*/) {
         // No op
         return binder::Status::ok();
     };
 
-    virtual binder::Status onTorchStatusChanged(int32_t /*status*/, const String16& /*cameraId*/) {
+    virtual binder::Status onTorchStatusChanged(int32_t /*status*/,
+            const std::string& /*cameraId*/) {
         return binder::Status::ok();
     };
 
@@ -56,18 +57,18 @@
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraOpened(const String16& /*cameraId*/,
-            const String16& /*clientPackageName*/) {
+    virtual binder::Status onCameraOpened(const std::string& /*cameraId*/,
+            const std::string& /*clientPackageName*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onCameraClosed(const String16& /*cameraId*/) {
+    virtual binder::Status onCameraClosed(const std::string& /*cameraId*/) {
         // No op
         return binder::Status::ok();
     }
 
-    virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+    virtual binder::Status onTorchStrengthLevelChanged(const std::string& /*cameraId*/,
             int32_t /*torchStrength*/) {
         // No op
         return binder::Status::ok();
@@ -123,13 +124,13 @@
             mCameraDisabled(false), mOverrideCameraDisabled(false)
     { }
 
-    virtual binder::Status getRotateAndCropOverride(const String16& packageName, int lensFacing,
+    virtual binder::Status getRotateAndCropOverride(const std::string& packageName, int lensFacing,
             int userId, int *ret) override {
         return mCameraServiceProxy->getRotateAndCropOverride(packageName, lensFacing,
                 userId, ret);
     }
 
-    virtual binder::Status getAutoframingOverride(const String16& packageName, int *ret) override {
+    virtual binder::Status getAutoframingOverride(const std::string& packageName, int *ret) override {
         return mCameraServiceProxy->getAutoframingOverride(packageName, ret);
     }
 
@@ -224,7 +225,7 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
         AutoDisconnectDevice autoDisconnect(device);
@@ -238,7 +239,7 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &device);
         AutoDisconnectDevice autoDisconnect(device);
@@ -257,14 +258,14 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
@@ -285,14 +286,14 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, String16(s.cameraId), String16(), {},
+                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
                 android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 1a6b2e0..c0cd1a9 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -302,15 +302,15 @@
 
     ~TestStatusListener() {}
 
-    void onDeviceStatusChanged(const String8 &,
+    void onDeviceStatusChanged(const std::string &,
             CameraDeviceStatus) override {}
-    void onDeviceStatusChanged(const String8 &, const String8 &,
+    void onDeviceStatusChanged(const std::string &, const std::string &,
             CameraDeviceStatus) override {
         mPhysicalCameraStatusChangeCount++;
     }
-    void onTorchStatusChanged(const String8 &,
+    void onTorchStatusChanged(const std::string &,
             TorchModeStatus) override {}
-    void onTorchStatusChanged(const String8 &,
+    void onTorchStatusChanged(const std::string &,
             TorchModeStatus, SystemCameraKind) override {}
     void onNewProviderRegistered() override {}
 };
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index b367571..33cc61a 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -19,8 +19,8 @@
 
 #include <random>
 
+#include <fmt/printf.h>
 #include <gtest/gtest.h>
-#include <android-base/stringprintf.h>
 #include <android-base/chrono_utils.h>
 
 #include "../device3/DistortionMapper.h"
@@ -260,9 +260,9 @@
                 rawToCorrectedDurationMs) / (randCoords.size() / 2) ).count();
 
     test->RecordProperty("CorrectedToRawDurationPerCoordUs",
-            base::StringPrintf("%f", correctedToRawDurationPerCoordUs));
+            fmt::sprintf("%f", correctedToRawDurationPerCoordUs));
     test->RecordProperty("RawToCorrectedDurationPerCoordUs",
-            base::StringPrintf("%f", rawToCorrectedDurationPerCoordUs));
+            fmt::sprintf("%f", rawToCorrectedDurationPerCoordUs));
 
     // Calculate mapping errors after round trip
     float totalErrorSq = 0;
@@ -296,9 +296,9 @@
     }
 
     float rmsError = std::sqrt(totalErrorSq / randCoords.size());
-    test->RecordProperty("RmsError", base::StringPrintf("%f", rmsError));
+    test->RecordProperty("RmsError", fmt::sprintf("%f", rmsError));
     for (size_t i = 0; i < histogram.size(); i++) {
-        std::string label = base::StringPrintf("HistogramBin[%f,%f)",
+        std::string label = fmt::sprintf("HistogramBin[%f,%f)",
                 (float)i/bucketsPerPixel, (float)(i + 1)/bucketsPerPixel);
         test->RecordProperty(label, histogram[i]);
     }
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index b58975f..d07bf6d 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -20,6 +20,8 @@
 
 #include <inttypes.h>
 #include <utils/Log.h>
+#include <utils/String16.h>
+#include <camera/StringUtils.h>
 #include <binder/IServiceManager.h>
 
 #include "CameraServiceProxyWrapper.h"
@@ -32,7 +34,7 @@
 
 namespace {
 // Sentinel value to be returned when extension session with a stale or invalid key is reported.
-const String16 POISON_EXT_STATS_KEY("poisoned_stats");
+const std::string POISON_EXT_STATS_KEY("poisoned_stats");
 } // anonymous namespace
 
 /**
@@ -100,7 +102,7 @@
     mSessionStats.mRequestCount = requestCount;
     mSessionStats.mResultErrorCount = resultErrorCount;
     mSessionStats.mDeviceError = deviceError;
-    mSessionStats.mUserTag = String16(userTag.c_str());
+    mSessionStats.mUserTag = userTag;
     mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
     mSessionStats.mStreamStats = streamStats;
 
@@ -116,14 +118,14 @@
     return mSessionStats.mLogId;
 }
 
-String16 CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
+std::string CameraServiceProxyWrapper::CameraSessionStatsWrapper::updateExtensionSessionStats(
         const hardware::CameraExtensionSessionStats& extStats) {
     Mutex::Autolock l(mLock);
     CameraExtensionSessionStats& currStats = mSessionStats.mCameraExtensionSessionStats;
     if (currStats.key != extStats.key) {
         // Mismatched keys. Extensions stats likely reported for a closed session
         ALOGW("%s: mismatched extensions stats key: current='%s' reported='%s'. Dropping stats.",
-              __FUNCTION__, String8(currStats.key).c_str(), String8(extStats.key).c_str());
+              __FUNCTION__, toStdString(currStats.key).c_str(), toStdString(extStats.key).c_str());
         return POISON_EXT_STATS_KEY; // return poisoned key so future calls are
                                      // definitely dropped.
     }
@@ -134,15 +136,15 @@
         ALOGV("%s: Overwriting extension session stats: %s", __FUNCTION__,
               extStats.toString().c_str());
         currStats = extStats;
-        return currStats.key;
+        return toStdString(currStats.key);
     }
 
     // Matching empty keys...
-    if (mSessionStats.mClientName != extStats.clientName) {
+    if (mSessionStats.mClientName != toStdString(extStats.clientName)) {
         ALOGW("%s: extension stats reported for unexpected package: current='%s' reported='%s'. "
               "Dropping stats.", __FUNCTION__,
-              String8(mSessionStats.mClientName).c_str(),
-              String8(extStats.clientName).c_str());
+              mSessionStats.mClientName.c_str(),
+              toStdString(extStats.clientName).c_str());
         return POISON_EXT_STATS_KEY;
     }
 
@@ -166,12 +168,12 @@
         key << mSessionStats.mSessionIndex << '/' << mSessionStats.mLogId;
         currStats.key = String16(key.str().c_str());
         ALOGV("%s: New extension session stats: %s", __FUNCTION__, currStats.toString().c_str());
-        return currStats.key;
+        return toStdString(currStats.key);
     }
 
     // Camera is closed. Probably a stale call.
     ALOGW("%s: extension stats reported for closed camera id '%s'. Dropping stats.",
-          __FUNCTION__, String8(mSessionStats.mCameraId).c_str());
+          __FUNCTION__, mSessionStats.mCameraId.c_str());
     return {};
 }
 
@@ -209,12 +211,13 @@
     proxyBinder->pingForUserUpdate();
 }
 
-int CameraServiceProxyWrapper::getRotateAndCropOverride(String16 packageName, int lensFacing,
-        int userId) {
+int CameraServiceProxyWrapper::getRotateAndCropOverride(const std::string &packageName,
+        int lensFacing, int userId) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     if (proxyBinder == nullptr) return true;
     int ret = 0;
-    auto status = proxyBinder->getRotateAndCropOverride(packageName, lensFacing, userId, &ret);
+    auto status = proxyBinder->getRotateAndCropOverride(packageName, lensFacing,
+            userId, &ret);
     if (!status.isOk()) {
         ALOGE("%s: Failed during top activity orientation query: %s", __FUNCTION__,
                 status.exceptionMessage().c_str());
@@ -223,7 +226,7 @@
     return ret;
 }
 
-int CameraServiceProxyWrapper::getAutoframingOverride(const String16& packageName) {
+int CameraServiceProxyWrapper::getAutoframingOverride(const std::string& packageName) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
     if (proxyBinder == nullptr) {
         return ANDROID_CONTROL_AUTOFRAMING_OFF;
@@ -238,7 +241,7 @@
     return ret;
 }
 
-void CameraServiceProxyWrapper::logStreamConfigured(const String8& id,
+void CameraServiceProxyWrapper::logStreamConfigured(const std::string& id,
         int operatingMode, bool internalConfig, int32_t latencyMs) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
@@ -256,7 +259,7 @@
     sessionStats->onStreamConfigured(operatingMode, internalConfig, latencyMs);
 }
 
-void CameraServiceProxyWrapper::logActive(const String8& id, float maxPreviewFps) {
+void CameraServiceProxyWrapper::logActive(const std::string& id, float maxPreviewFps) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -273,7 +276,7 @@
     sessionStats->onActive(proxyBinder, maxPreviewFps);
 }
 
-void CameraServiceProxyWrapper::logIdle(const String8& id,
+void CameraServiceProxyWrapper::logIdle(const std::string& id,
         int64_t requestCount, int64_t resultErrorCount, bool deviceError,
         const std::string& userTag, int32_t videoStabilizationMode,
         const std::vector<hardware::CameraStreamStats>& streamStats) {
@@ -304,8 +307,8 @@
             videoStabilizationMode, streamStats);
 }
 
-void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
-            const String16& clientPackageName, int effectiveApiLevel, bool isNdk,
+void CameraServiceProxyWrapper::logOpen(const std::string& id, int facing,
+            const std::string& clientPackageName, int effectiveApiLevel, bool isNdk,
             int32_t latencyMs) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
@@ -325,7 +328,7 @@
         int64_t logId = generateLogId(mRandomDevice);
 
         sessionStats = std::make_shared<CameraSessionStatsWrapper>(
-                String16(id), facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
+                id, facing, CameraSessionStats::CAMERA_STATE_OPEN, clientPackageName,
                 apiLevel, isNdk, latencyMs, logId);
         mSessionStatsMap.emplace(id, sessionStats);
         ALOGV("%s: Adding id %s", __FUNCTION__, id.c_str());
@@ -337,7 +340,8 @@
     sessionStats->onOpen(proxyBinder);
 }
 
-void CameraServiceProxyWrapper::logClose(const String8& id, int32_t latencyMs, bool deviceError) {
+void CameraServiceProxyWrapper::logClose(const std::string& id, int32_t latencyMs,
+        bool deviceError) {
     std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
     {
         Mutex::Autolock l(mLock);
@@ -376,7 +380,7 @@
     return ret;
 }
 
-int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const String8& cameraId) {
+int64_t CameraServiceProxyWrapper::getCurrentLogIdForCamera(const std::string& cameraId) {
     std::shared_ptr<CameraSessionStatsWrapper> stats;
     {
         Mutex::Autolock _l(mLock);
@@ -403,10 +407,10 @@
     return ret;
 }
 
-String16 CameraServiceProxyWrapper::updateExtensionStats(
+std::string CameraServiceProxyWrapper::updateExtensionStats(
         const hardware::CameraExtensionSessionStats& extStats) {
     std::shared_ptr<CameraSessionStatsWrapper> stats;
-    String8 cameraId = String8(extStats.cameraId);
+    std::string cameraId = toStdString(extStats.cameraId);
     {
         Mutex::Autolock _l(mLock);
         if (mSessionStatsMap.count(cameraId) == 0) {
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index e32580c..1afe5b3 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -20,11 +20,10 @@
 #include <android/hardware/ICameraServiceProxy.h>
 
 #include <utils/Mutex.h>
-#include <utils/String8.h>
-#include <utils/String16.h>
 #include <utils/StrongPointer.h>
 #include <utils/Timers.h>
 #include <random>
+#include <string>
 
 #include <camera/CameraSessionStats.h>
 
@@ -49,8 +48,8 @@
         void updateProxyDeviceState(sp<hardware::ICameraServiceProxy>& proxyBinder);
 
       public:
-        CameraSessionStatsWrapper(const String16& cameraId, int facing, int newCameraState,
-                                  const String16& clientName, int apiLevel, bool isNdk,
+        CameraSessionStatsWrapper(const std::string& cameraId, int facing, int newCameraState,
+                                  const std::string& clientName, int apiLevel, bool isNdk,
                                   int32_t latencyMs, int64_t logId)
             : mSessionStats(cameraId, facing, newCameraState, clientName, apiLevel, isNdk,
                             latencyMs, logId) {}
@@ -65,7 +64,8 @@
                 const std::string& userTag, int32_t videoStabilizationMode,
                 const std::vector<hardware::CameraStreamStats>& streamStats);
 
-        String16 updateExtensionSessionStats(const hardware::CameraExtensionSessionStats& extStats);
+        std::string updateExtensionSessionStats(
+                const hardware::CameraExtensionSessionStats& extStats);
 
         // Returns the logId associated with this event.
         int64_t getLogId();
@@ -74,7 +74,7 @@
     // Lock for camera session stats map
     Mutex mLock;
     // Map from camera id to the camera's session statistics
-    std::map<String8, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
+    std::map<std::string, std::shared_ptr<CameraSessionStatsWrapper>> mSessionStatsMap;
 
     std::random_device mRandomDevice;  // pulls 32-bit random numbers from /dev/urandom
 
@@ -93,22 +93,22 @@
     static sp<hardware::ICameraServiceProxy> getDefaultCameraServiceProxy();
 
     // Open
-    void logOpen(const String8& id, int facing,
-            const String16& clientPackageName, int apiLevel, bool isNdk,
+    void logOpen(const std::string& id, int facing,
+            const std::string& clientPackageName, int apiLevel, bool isNdk,
             int32_t latencyMs);
 
     // Close
-    void logClose(const String8& id, int32_t latencyMs, bool deviceError);
+    void logClose(const std::string& id, int32_t latencyMs, bool deviceError);
 
     // Stream configuration
-    void logStreamConfigured(const String8& id, int operatingMode, bool internalReconfig,
+    void logStreamConfigured(const std::string& id, int operatingMode, bool internalReconfig,
             int32_t latencyMs);
 
     // Session state becomes active
-    void logActive(const String8& id, float maxPreviewFps);
+    void logActive(const std::string& id, float maxPreviewFps);
 
     // Session state becomes idle
-    void logIdle(const String8& id,
+    void logIdle(const std::string& id,
             int64_t requestCount, int64_t resultErrorCount, bool deviceError,
             const std::string& userTag, int32_t videoStabilizationMode,
             const std::vector<hardware::CameraStreamStats>& streamStats);
@@ -117,10 +117,10 @@
     void pingCameraServiceProxy();
 
     // Return the current top activity rotate and crop override.
-    int getRotateAndCropOverride(String16 packageName, int lensFacing, int userId);
+    int getRotateAndCropOverride(const std::string &packageName, int lensFacing, int userId);
 
     // Return the current top activity autoframing.
-    int getAutoframingOverride(const String16& packageName);
+    int getAutoframingOverride(const std::string& packageName);
 
     // Detect if the camera is disabled by device policy.
     bool isCameraDisabled(int userId);
@@ -128,10 +128,10 @@
     // Returns the logId currently associated with the given cameraId. See 'mLogId' in
     // frameworks/av/camera/include/camera/CameraSessionStats.h for more details about this
     // identifier. Returns a non-0 value on success.
-    int64_t getCurrentLogIdForCamera(const String8& cameraId);
+    int64_t getCurrentLogIdForCamera(const std::string& cameraId);
 
     // Update the stored extension stats to the latest values
-    String16 updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
+    std::string updateExtensionStats(const hardware::CameraExtensionSessionStats& extStats);
 };
 
 } // android
diff --git a/services/camera/libcameraservice/utils/CameraTraces.h b/services/camera/libcameraservice/utils/CameraTraces.h
index 71fa334..be8d393 100644
--- a/services/camera/libcameraservice/utils/CameraTraces.h
+++ b/services/camera/libcameraservice/utils/CameraTraces.h
@@ -18,7 +18,6 @@
 #define ANDROID_SERVERS_CAMERA_TRACES_H_
 
 #include <utils/Errors.h>
-#include <utils/String16.h>
 #include <utils/Vector.h>
 
 namespace android {
diff --git a/services/camera/libcameraservice/utils/LatencyHistogram.cpp b/services/camera/libcameraservice/utils/LatencyHistogram.cpp
index e2bdc43..a167068 100644
--- a/services/camera/libcameraservice/utils/LatencyHistogram.cpp
+++ b/services/camera/libcameraservice/utils/LatencyHistogram.cpp
@@ -16,8 +16,9 @@
 
 #define LOG_TAG "CameraLatencyHistogram"
 #include <inttypes.h>
+#include <android-base/stringprintf.h>
 #include <utils/Log.h>
-#include <utils/String8.h>
+#include <camera/StringUtils.h>
 
 #include "LatencyHistogram.h"
 
@@ -55,18 +56,18 @@
         return;
     }
 
-    String8 lines;
-    lines.appendFormat("%s (%" PRId64 ") samples\n", name, mTotalCount);
+    std::string lines;
+    lines += fmt::sprintf("%s (%" PRId64 ") samples\n", name, mTotalCount);
 
-    String8 lineBins, lineBinCounts;
+    std::string lineBins, lineBinCounts;
     formatHistogramText(lineBins, lineBinCounts);
 
-    lineBins.append("\n");
-    lineBinCounts.append("\n");
-    lines.append(lineBins);
-    lines.append(lineBinCounts);
+    lineBins += "\n";
+    lineBinCounts += "\n";
+    lines += lineBins;
+    lines += lineBinCounts;
 
-    write(fd, lines.string(), lines.size());
+    write(fd, lines.c_str(), lines.size());
 }
 
 void CameraLatencyHistogram::log(const char* fmt, ...) {
@@ -76,11 +77,12 @@
 
     va_list args;
     va_start(args, fmt);
-    String8 histogramName = String8::formatV(fmt, args);
-    ALOGI("%s (%" PRId64 ") samples:", histogramName.string(), mTotalCount);
+    std::string histogramName;
+    base::StringAppendV(&histogramName, fmt, args);
+    ALOGI("%s (%" PRId64 ") samples:", histogramName.c_str(), mTotalCount);
     va_end(args);
 
-    String8 lineBins, lineBinCounts;
+    std::string lineBins, lineBinCounts;
     formatHistogramText(lineBins, lineBinCounts);
 
     ALOGI("%s", lineBins.c_str());
@@ -88,19 +90,19 @@
 }
 
 void CameraLatencyHistogram::formatHistogramText(
-        String8& lineBins, String8& lineBinCounts) const {
+        std::string& lineBins, std::string& lineBinCounts) const {
     lineBins = "  ";
     lineBinCounts = "  ";
 
     for (int32_t i = 0; i < mBinCount; i++) {
         if (i == mBinCount - 1) {
-            lineBins.append("    inf (max ms)");
+            lineBins += "    inf (max ms)";
         } else {
-            lineBins.appendFormat("%7d", mBinSizeMs*(i+1));
+            lineBins += fmt::sprintf("%7d", mBinSizeMs*(i+1));
         }
-        lineBinCounts.appendFormat("   %02.2f", 100.0*mBins[i]/mTotalCount);
+        lineBinCounts += fmt::sprintf("   %02.2f", 100.0*mBins[i]/mTotalCount);
     }
-    lineBinCounts.append(" (%)");
+    lineBinCounts += " (%)";
 }
 
 }; //namespace android
diff --git a/services/camera/libcameraservice/utils/LatencyHistogram.h b/services/camera/libcameraservice/utils/LatencyHistogram.h
index bfd9b1b..89a9cc1 100644
--- a/services/camera/libcameraservice/utils/LatencyHistogram.h
+++ b/services/camera/libcameraservice/utils/LatencyHistogram.h
@@ -40,7 +40,7 @@
     std::vector<int64_t> mBins;
     uint64_t mTotalCount;
 
-    void formatHistogramText(String8& lineBins, String8& lineBinCounts) const;
+    void formatHistogramText(std::string& lineBins, std::string& lineBinCounts) const;
 }; // class CameraLatencyHistogram
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
new file mode 100644
index 0000000..92a1030
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SchedulingPolicyUtils.h"
+
+#include <errno.h>
+#include <pthread.h>
+#include <sched.h>
+
+#include "CameraThreadState.h"
+#include <private/android_filesystem_config.h>
+#include <processgroup/processgroup.h>
+#include <processgroup/sched_policy.h>
+#include <procinfo/process.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+int requestPriorityDirect(int pid, int tid, int prio) {
+    android::procinfo::ProcessInfo processInfo;
+    static const int kMinPrio = 1;
+    static const int kMaxPrio = 3;
+
+    if (!android::procinfo::GetProcessInfo(tid, &processInfo)) {
+       ALOGE("%s: Error getting process info", __FUNCTION__);
+       return -EPERM;
+    }
+
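+    // Reject out-of-range priorities and tids that do not belong to the given pid.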
+    if (prio < kMinPrio || prio > kMaxPrio || processInfo.pid != pid) {
+        ALOGE("%s: Invalid parameter prio=%d pid=%d procinfo.pid=%d", __FUNCTION__, prio, pid,
+                processInfo.pid);
+        return -EPERM;
+    }
+
+    // Set the thread group to the audio system thread group, consistent with
+    // the implementation in SchedulingPolicyService.java when isApp is false
+    // in the requestPriority method.
+    if (!SetTaskProfiles(tid, {get_sched_policy_profile_name(SP_AUDIO_SYS)},
+            /*use_fd_cache*/ true)) {
+        ALOGE("%s:Error in  SetTaskProfiles", __FUNCTION__);
+        return -EPERM;
+    }
+
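+    // SCHED_RESET_ON_FORK prevents children forked from this thread from inheriting SCHED_FIFO.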
+    struct sched_param param;
+    param.sched_priority = prio;
+    return sched_setscheduler(tid, SCHED_FIFO | SCHED_RESET_ON_FORK, &param);
+}
+
+} // namespace SchedulingPolicyUtils
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
new file mode 100644
index 0000000..f71fddf
--- /dev/null
+++ b/services/camera/libcameraservice/utils/SchedulingPolicyUtils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+#define ANDROID_SERVICE_CAMERA_SCHEDULING_POLICY_UTILS_H
+
+namespace android {
+namespace camera3 {
+namespace SchedulingPolicyUtils {
+
+/**
+ * Request elevated priority for thread tid, whose thread group leader must be pid.
+ * Instead of using the scheduling policy service, this method uses direct system calls.
+ * The priority parameter is currently restricted to the range 1 to 3, matching the
+ * scheduling policy service implementation.
+ */
+int requestPriorityDirect(int pid, int tid, int prio);
+
+} // SchedulingPolicyUtils
+} // camera3
+} // android
+
+#endif
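
For reference, a minimal caller-side sketch of the new helper (illustrative only and not part of this change; the example function name, the priority value 2, and the include paths are assumptions):

#define LOG_TAG "SchedulingPolicyUtilsExample"
#include <utils/Log.h>
#include "utils/SchedulingPolicyUtils.h"

// Hypothetical call site: raise a camera request thread to real-time priority 2
// without going through SchedulingPolicyService. pid must be tid's thread group leader.
static void elevateRequestThread(int pid, int tid) {
    int err = android::camera3::SchedulingPolicyUtils::requestPriorityDirect(pid, tid, /*prio*/ 2);
    if (err != 0) {
        ALOGW("Unable to raise priority of tid %d (pid %d): err=%d", tid, pid, err);
    }
}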
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 89e75b3..f7257e3 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -28,6 +28,7 @@
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
 #include "system/graphics-base-v1.1.h"
+#include <camera/StringUtils.h>
 #include <ui/PublicFormat.h>
 
 using android::camera3::OutputStreamInfo;
@@ -427,15 +428,15 @@
 binder::Status createSurfaceFromGbp(
         OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
         int32_t colorSpace) {
     // bufferProducer must be non-null
     if (gbp == nullptr) {
-        String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
-        ALOGW("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
+        ALOGW("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     // HACK b/10949105
     // Query consumer usage bits to set async operation mode for
@@ -444,14 +445,14 @@
     uint64_t consumerUsage = 0;
     status_t err;
     if ((err = gbp->getConsumerUsage(&consumerUsage)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
-                logicalCameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface consumer usage: %s (%d)",
+                logicalCameraId.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
     if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
         ALOGW("%s: Camera %s with consumer usage flag: %" PRIu64 ": Forcing asynchronous mode for"
-                "stream", __FUNCTION__, logicalCameraId.string(), consumerUsage);
+                "stream", __FUNCTION__, logicalCameraId.c_str(), consumerUsage);
         useAsync = true;
     }
 
@@ -469,38 +470,38 @@
     int width, height, format;
     android_dataspace dataSpace;
     if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
-                 logicalCameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface width: %s (%d)",
+                 logicalCameraId.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
-                logicalCameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface height: %s (%d)",
+                logicalCameraId.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
-                logicalCameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface format: %s (%d)",
+                logicalCameraId.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
     if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
             reinterpret_cast<int*>(&dataSpace))) != OK) {
-        String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
-                logicalCameraId.string(), strerror(-err), err);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: Failed to query Surface dataspace: %s (%d)",
+                logicalCameraId.c_str(), strerror(-err), err);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
     }
 
     if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
             format != HAL_PIXEL_FORMAT_BLOB) {
         if (!dataSpaceFromColorSpace(&dataSpace, colorSpace)) {
-            String8 msg = String8::format("Camera %s: color space %d not supported, failed to "
-                    "convert to data space", logicalCameraId.string(), colorSpace);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            std::string msg = fmt::sprintf("Camera %s: color space %d not supported, failed to "
+                    "convert to data space", logicalCameraId.c_str(), colorSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
     }
 
@@ -510,16 +511,16 @@
             ((consumerUsage & GRALLOC_USAGE_HW_MASK) &&
              ((consumerUsage & GRALLOC_USAGE_SW_READ_MASK) == 0))) {
         ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
-                __FUNCTION__, logicalCameraId.string(), format);
+                __FUNCTION__, logicalCameraId.c_str(), format);
         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
     }
     std::unordered_set<int32_t> overriddenSensorPixelModes;
     if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed, format, width, height,
             physicalCameraMetadata, &overriddenSensorPixelModes) != OK) {
-        String8 msg = String8::format("Camera %s: sensor pixel modes for stream with "
-                "format %#x are not valid",logicalCameraId.string(), format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: sensor pixel modes for stream with "
+                "format %#x are not valid",logicalCameraId.c_str(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     bool foundInMaxRes = false;
     if (overriddenSensorPixelModes.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -532,58 +533,58 @@
             !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
             format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
             /*out*/&height)) {
-        String8 msg = String8::format("Camera %s: No supported stream configurations with "
+        std::string msg = fmt::sprintf("Camera %s: No supported stream configurations with "
                 "format %#x defined, failed to create output stream",
-                logicalCameraId.string(), format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                logicalCameraId.c_str(), format);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (!SessionConfigurationUtils::isDynamicRangeProfileSupported(dynamicRangeProfile,
                 physicalCameraMetadata)) {
-        String8 msg = String8::format("Camera %s: Dynamic range profile 0x%" PRIx64
-                " not supported,failed to create output stream", logicalCameraId.string(),
+        std::string msg = fmt::sprintf("Camera %s: Dynamic range profile 0x%" PRIx64
+                " not supported,failed to create output stream", logicalCameraId.c_str(),
                 dynamicRangeProfile);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (SessionConfigurationUtils::is10bitDynamicRangeProfile(dynamicRangeProfile) &&
             !SessionConfigurationUtils::is10bitCompatibleFormat(format, dataSpace)) {
-        String8 msg = String8::format("Camera %s: No 10-bit supported stream configurations with "
+        std::string msg = fmt::sprintf("Camera %s: No 10-bit supported stream configurations with "
                 "format %#x defined and profile %" PRIx64 ", failed to create output stream",
-                logicalCameraId.string(), format, dynamicRangeProfile);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                logicalCameraId.c_str(), format, dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
             SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
             !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
                     dynamicRangeProfile, physicalCameraMetadata)) {
-        String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+        std::string msg = fmt::sprintf("Camera %s: Color space %d not supported, failed to "
                 "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
-                logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                logicalCameraId.c_str(), colorSpace, format, dynamicRangeProfile);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
             physicalCameraMetadata)) {
-        String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
-                " failed to create output stream", logicalCameraId.string(), streamUseCase);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: stream use case %" PRId64 " not supported,"
+                " failed to create output stream", logicalCameraId.c_str(), streamUseCase);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (timestampBase < OutputConfiguration::TIMESTAMP_BASE_DEFAULT ||
             timestampBase > OutputConfiguration::TIMESTAMP_BASE_MAX) {
-        String8 msg = String8::format("Camera %s: invalid timestamp base %d",
-                logicalCameraId.string(), timestampBase);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: invalid timestamp base %d",
+                logicalCameraId.c_str(), timestampBase);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (mirrorMode < OutputConfiguration::MIRROR_MODE_AUTO ||
             mirrorMode > OutputConfiguration::MIRROR_MODE_V) {
-        String8 msg = String8::format("Camera %s: invalid mirroring mode %d",
-                logicalCameraId.string(), mirrorMode);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s: invalid mirroring mode %d",
+                logicalCameraId.c_str(), mirrorMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     if (!isStreamInfoValid) {
@@ -601,45 +602,45 @@
         return binder::Status::ok();
     }
     if (width != streamInfo.width) {
-        String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
-                logicalCameraId.string(), width, streamInfo.width);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s:Surface width doesn't match: %d vs %d",
+                logicalCameraId.c_str(), width, streamInfo.width);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (height != streamInfo.height) {
-        String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
-                 logicalCameraId.string(), height, streamInfo.height);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s:Surface height doesn't match: %d vs %d",
+                 logicalCameraId.c_str(), height, streamInfo.height);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (format != streamInfo.format) {
-        String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
-                 logicalCameraId.string(), format, streamInfo.format);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg = fmt::sprintf("Camera %s:Surface format doesn't match: %d vs %d",
+                 logicalCameraId.c_str(), format, streamInfo.format);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
         if (dataSpace != streamInfo.dataSpace) {
-            String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
-                    logicalCameraId.string(), dataSpace, streamInfo.dataSpace);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+            std::string msg = fmt::sprintf("Camera %s:Surface dataSpace doesn't match: %d vs %d",
+                    logicalCameraId.c_str(), dataSpace, streamInfo.dataSpace);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
         //At the native side, there isn't a way to check whether 2 surfaces come from the same
         //surface class type. Use usage flag to approximate the comparison.
         if (consumerUsage != streamInfo.consumerUsage) {
-            String8 msg = String8::format(
+            std::string msg = fmt::sprintf(
                     "Camera %s:Surface usage flag doesn't match %" PRIu64 " vs %" PRIu64 "",
-                    logicalCameraId.string(), consumerUsage, streamInfo.consumerUsage);
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                    logicalCameraId.c_str(), consumerUsage, streamInfo.consumerUsage);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
     }
     return binder::Status::ok();
 }
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
-            camera3::camera_stream_rotation_t rotation, String8 physicalId,
+            camera3::camera_stream_rotation_t rotation, const std::string &physicalId,
             int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/) {
     if (stream == nullptr) {
         return;
@@ -656,7 +657,7 @@
     stream->colorSpace = streamInfo.colorSpace;
     stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
     stream->id = -1; // Invalid stream id
-    stream->physicalCameraId = std::string(physicalId.string());
+    stream->physicalCameraId = physicalId;
     stream->bufferSize = 0;
     stream->groupId = groupId;
     stream->sensorPixelModesUsed.resize(streamInfo.sensorPixelModesUsed.size());
@@ -677,34 +678,35 @@
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
-        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+        const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
         bool isCompositeJpegRDisabled,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
     using SensorPixelMode = aidl::android::hardware::camera::metadata::SensorPixelMode;
     auto operatingMode = sessionConfiguration.getOperatingMode();
-    binder::Status res = checkOperatingMode(operatingMode, deviceInfo, logicalCameraId);
+    binder::Status res = checkOperatingMode(operatingMode, deviceInfo,
+            logicalCameraId);
     if (!res.isOk()) {
         return res;
     }
 
     if (earlyExit == nullptr) {
-        String8 msg("earlyExit nullptr");
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        std::string msg("earlyExit nullptr");
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     *earlyExit = false;
     auto ret = AidlCamera3Device::mapToAidlStreamConfigurationMode(
             static_cast<camera_stream_configuration_mode_t> (operatingMode),
             /*out*/ &streamConfiguration.operationMode);
     if (ret != OK) {
-        String8 msg = String8::format(
+        std::string msg = fmt::sprintf(
             "Camera %s: Failed mapping operating mode %d requested: %s (%d)",
-            logicalCameraId.string(), operatingMode, strerror(-ret), ret);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+            logicalCameraId.c_str(), operatingMode, strerror(-ret), ret);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
+                msg.c_str());
     }
 
     bool isInputValid = (sessionConfiguration.getInputWidth() > 0) &&
@@ -749,7 +751,7 @@
         const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
             it.getGraphicBufferProducers();
         bool deferredConsumer = it.isDeferred();
-        String8 physicalCameraId = String8(it.getPhysicalCameraId());
+        const std::string &physicalCameraId = it.getPhysicalCameraId();
 
         int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
         int32_t colorSpace = it.getColorSpace();
@@ -841,11 +843,11 @@
                     }
 
                     if (ret != OK) {
-                        String8 msg = String8::format(
+                        std::string msg = fmt::sprintf(
                                 "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.string(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.string());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+                                logicalCameraId.c_str(), strerror(-ret), ret);
+                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
                     }
 
                     if (compositeStreams.size() == 0) {
@@ -877,17 +879,17 @@
 }
 
 binder::Status checkPhysicalCameraId(
-        const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-        const String8 &logicalCameraId) {
+        const std::vector<std::string> &physicalCameraIds, const std::string &physicalCameraId,
+        const std::string &logicalCameraId) {
     if (physicalCameraId.size() == 0) {
         return binder::Status::ok();
     }
     if (std::find(physicalCameraIds.begin(), physicalCameraIds.end(),
-        physicalCameraId.string()) == physicalCameraIds.end()) {
-        String8 msg = String8::format("Camera %s: Camera doesn't support physicalCameraId %s.",
-                logicalCameraId.string(), physicalCameraId.string());
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
-        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+        physicalCameraId) == physicalCameraIds.end()) {
+        std::string msg = fmt::sprintf("Camera %s: Camera doesn't support physicalCameraId %s.",
+                logicalCameraId.c_str(), physicalCameraId.c_str());
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     return binder::Status::ok();
 }
@@ -915,13 +917,13 @@
 }
 
 binder::Status checkOperatingMode(int operatingMode,
-        const CameraMetadata &staticInfo, const String8 &cameraId) {
+        const CameraMetadata &staticInfo, const std::string &cameraId) {
     if (operatingMode < 0) {
-        String8 msg = String8::format(
-            "Camera %s: Invalid operating mode %d requested", cameraId.string(), operatingMode);
-        ALOGE("%s: %s", __FUNCTION__, msg.string());
+        std::string msg = fmt::sprintf(
+            "Camera %s: Invalid operating mode %d requested", cameraId.c_str(), operatingMode);
+        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                msg.string());
+                msg.c_str());
     }
 
     bool isConstrainedHighSpeed = (operatingMode == ICameraDeviceUser::CONSTRAINED_HIGH_SPEED_MODE);
@@ -936,12 +938,12 @@
             }
         }
         if (!isConstrainedHighSpeedSupported) {
-            String8 msg = String8::format(
+            std::string msg = fmt::sprintf(
                 "Camera %s: Try to create a constrained high speed configuration on a device"
-                " that doesn't support it.", cameraId.string());
-            ALOGE("%s: %s", __FUNCTION__, msg.string());
+                " that doesn't support it.", cameraId.c_str());
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
-                    msg.string());
+                    msg.c_str());
         }
     }
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index dc143da..79d80ea 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -21,6 +21,7 @@
 #include <camera/camera2/OutputConfiguration.h>
 #include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/SubmitInfo.h>
+#include <camera/StringUtils.h>
 #include <aidl/android/hardware/camera/device/ICameraDevice.h>
 #include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
 #include <android/hardware/camera/device/3.7/ICameraDeviceSession.h>
@@ -37,17 +38,18 @@
 
 #define STATUS_ERROR(errorCode, errorString) \
     binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: %s", __FUNCTION__, __LINE__, errorString))
+            fmt::sprintf("%s:%d: %s", __FUNCTION__, __LINE__, errorString).c_str())
 
 #define STATUS_ERROR_FMT(errorCode, errorString, ...) \
     binder::Status::fromServiceSpecificError(errorCode, \
-            String8::format("%s:%d: " errorString, __FUNCTION__, __LINE__, \
-                    __VA_ARGS__))
+            fmt::sprintf("%s:%d: " errorString, __FUNCTION__, __LINE__, \
+                    __VA_ARGS__).c_str())
 
 namespace android {
 namespace camera3 {
 
-typedef std::function<CameraMetadata (const String8 &, bool overrideForPerfClass)> metadataGetter;
+typedef std::function<CameraMetadata (const std::string &, bool overrideForPerfClass)>
+        metadataGetter;
 
 class StreamConfiguration {
 public:
@@ -96,7 +98,7 @@
 binder::Status createSurfaceFromGbp(
         camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
         sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-        const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
         const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
         int64_t streamUseCase, int timestampBase, int mirrorMode,
         int32_t colorSpace);
@@ -120,26 +122,27 @@
 bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
 
 void mapStreamInfo(const OutputStreamInfo &streamInfo,
-        camera3::camera_stream_rotation_t rotation, String8 physicalId,
+        camera3::camera_stream_rotation_t rotation, const std::string &physicalId,
         int32_t groupId, aidl::android::hardware::camera::device::Stream *stream /*out*/);
 
 // Check that the physicalCameraId passed in is supported by the camera
 // device.
 binder::Status checkPhysicalCameraId(
-const std::vector<std::string> &physicalCameraIds, const String8 &physicalCameraId,
-const String8 &logicalCameraId);
+const std::vector<std::string> &physicalCameraIds, const std::string &physicalCameraId,
+const std::string &logicalCameraId);
 
 binder::Status checkSurfaceType(size_t numBufferProducers,
 bool deferredConsumer, int surfaceType);
 
 binder::Status checkOperatingMode(int operatingMode,
-const CameraMetadata &staticInfo, const String8 &cameraId);
+const CameraMetadata &staticInfo, const std::string &cameraId);
 
 binder::Status
 convertToHALStreamCombination(
     const SessionConfiguration& sessionConfiguration,
-    const String8 &logicalCameraId, const CameraMetadata &deviceInfo, bool isCompositeJpegRDisabled,
-    metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
+    const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
+    bool isCompositeJpegRDisabled, metadataGetter getMetadata,
+    const std::vector<std::string> &physicalCameraIds,
     aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration,
     bool overrideForPerfClass, bool *earlyExit);
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index 111c1bf..cf93d3b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -105,7 +105,7 @@
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
-        const String8 &logicalCameraId, const CameraMetadata &deviceInfo,
+        const std::string &logicalCameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
index c47abe0..eae2e30 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.h
@@ -34,7 +34,7 @@
 // isn't valid.
 binder::Status
 convertToHALStreamCombination(const SessionConfiguration& sessionConfiguration,
-        const String8 &cameraId, const CameraMetadata &deviceInfo,
+        const std::string &cameraId, const CameraMetadata &deviceInfo,
         metadataGetter getMetadata, const std::vector<std::string> &physicalCameraIds,
         hardware::camera::device::V3_7::StreamConfiguration &streamConfiguration,
         bool overrideForPerfClass, bool *earlyExit);
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index d1e54ab..38de93a 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -18,11 +18,15 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <iostream>
+#include <sstream>
+
 #include "TagMonitor.h"
 
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <camera/VendorTagDescriptor.h>
+#include <camera/StringUtils.h>
 #include <camera_metadata_hidden.h>
 #include <device3/Camera3Stream.h>
 
@@ -44,7 +48,7 @@
         mMonitoringEvents(other.mMonitoringEvents),
         mVendorTagId(other.mVendorTagId) {}
 
-const String16 TagMonitor::kMonitorOption = String16("-m");
+const std::string TagMonitor::kMonitorOption("-m");
 
 const char* TagMonitor::k3aTags =
         "android.control.aeMode, android.control.afMode, android.control.awbMode,"
@@ -56,17 +60,17 @@
         "android.control.effectMode, android.control.mode, android.control.sceneMode,"
         "android.control.videoStabilizationMode";
 
-void TagMonitor::parseTagsToMonitor(String8 tagNames) {
+void TagMonitor::parseTagsToMonitor(std::string tagNames) {
     std::lock_guard<std::mutex> lock(mMonitorMutex);
 
     // Expand shorthands
     ssize_t idx = tagNames.find("3a");
     if (idx != -1) {
         ssize_t end = tagNames.find(",", idx);
-        char* start = tagNames.lockBuffer(tagNames.size());
-        start[idx] = '\0';
-        char* rest = (end != -1) ? (start + end) : (start + tagNames.size());
-        tagNames = String8::format("%s%s%s", start, k3aTags, rest);
+        // Capture the remainder before rewriting tagNames; otherwise the
+        // offset would point into the already-expanded string.
+        const std::string rest = (end != -1) ? tagNames.substr(end) : "";
+        tagNames = tagNames.substr(0, idx) + k3aTags + rest;
     }
 
     sp<VendorTagDescriptor> vTags =
@@ -81,14 +85,13 @@
 
     bool gotTag = false;
 
-    char *tokenized = tagNames.lockBuffer(tagNames.size());
-    char *savePtr;
-    char *nextTagName = strtok_r(tokenized, ", ", &savePtr);
-    while (nextTagName != nullptr) {
+    std::istringstream iss(tagNames);
+    std::string nextTagName;
+    while (std::getline(iss, nextTagName, ',')) {
         uint32_t tag;
-        status_t res = CameraMetadata::getTagFromName(nextTagName, vTags.get(), &tag);
+        status_t res = CameraMetadata::getTagFromName(nextTagName.c_str(), vTags.get(), &tag);
         if (res != OK) {
-            ALOGW("%s: Unknown tag %s, ignoring", __FUNCTION__, nextTagName);
+            ALOGW("%s: Unknown tag %s, ignoring", __FUNCTION__, nextTagName.c_str());
         } else {
             if (!gotTag) {
                 mMonitoredTagList.clear();
@@ -96,11 +99,9 @@
             }
             mMonitoredTagList.push_back(tag);
         }
-        nextTagName = strtok_r(nullptr, ", ", &savePtr);
+        iss >> std::ws;
     }
 
-    tagNames.unlockBuffer();
-
     if (gotTag) {
         // Got at least one new tag
         mMonitoringEnabled = true;
@@ -269,7 +270,7 @@
 
     for (const auto& event : mMonitoringEvents) {
         int indentation = (event.source == REQUEST) ? 15 : 30;
-        String8 eventString = String8::format("f%d:%" PRId64 "ns:%*s%*s",
+        std::string eventString = fmt::sprintf("f%d:%" PRId64 "ns:%*s%*s",
                 event.frameNumber, event.timestamp,
                 2, event.cameraId.c_str(),
                 indentation,
@@ -278,20 +279,20 @@
         if (!event.outputStreamIds.empty()) {
             eventString += " output stream ids:";
             for (const auto& id : event.outputStreamIds) {
-                eventString.appendFormat(" %d", id);
+                eventString += fmt::sprintf(" %d", id);
             }
             eventString += "\n";
-            vec.emplace_back(eventString.string());
+            vec.emplace_back(eventString);
             continue;
         }
 
         if (event.inputStreamId != -1) {
-            eventString.appendFormat(" input stream id: %d\n", event.inputStreamId);
-            vec.emplace_back(eventString.string());
+            eventString += fmt::sprintf(" input stream id: %d\n", event.inputStreamId);
+            vec.emplace_back(eventString);
             continue;
         }
 
-        eventString += String8::format(
+        eventString += fmt::sprintf(
                 "%s.%s: ",
                 get_local_camera_metadata_section_name_vendor_id(event.tag, mVendorTagId),
                 get_local_camera_metadata_tag_name_vendor_id(event.tag, mVendorTagId));
@@ -303,14 +304,14 @@
                     event.newData.data(), event.tag, event.type,
                     event.newData.size() / camera_metadata_type_size[event.type], indentation + 18);
         }
-        vec.emplace_back(eventString.string());
+        vec.emplace_back(eventString);
     }
 }
 
 #define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 29
 
-String8 TagMonitor::getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type, int count,
-                                       int indentation) {
+std::string TagMonitor::getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type,
+        int count, int indentation) {
     static int values_per_line[NUM_TYPES] = {
         [TYPE_BYTE]     = 16,
         [TYPE_INT32]    = 8,
@@ -327,11 +328,11 @@
     int lines = count / values_per_line[type];
     if (count % values_per_line[type] != 0) lines++;
 
-    String8 returnStr = String8();
+    std::ostringstream returnStr;
     int index = 0;
     int j, k;
     for (j = 0; j < lines; j++) {
-        returnStr.appendFormat("%*s[", (j != 0) ? indentation + 4 : 0, "");
+        returnStr << fmt::sprintf("%*s[", (j != 0) ? indentation + 4 : 0, "");
         for (k = 0;
              k < values_per_line[type] && count > 0;
              k++, count--, index += type_size) {
@@ -344,9 +345,9 @@
                                                      value_string_tmp,
                                                      sizeof(value_string_tmp))
                         == OK) {
-                        returnStr += value_string_tmp;
+                        returnStr << value_string_tmp;
                     } else {
-                        returnStr.appendFormat("%hhu ", *(data_ptr + index));
+                        returnStr << fmt::sprintf("%hhu ", *(data_ptr + index));
                     }
                     break;
                 case TYPE_INT32:
@@ -357,33 +358,33 @@
                                                      value_string_tmp,
                                                      sizeof(value_string_tmp))
                         == OK) {
-                        returnStr += value_string_tmp;
+                        returnStr << value_string_tmp;
                     } else {
-                        returnStr.appendFormat("%" PRId32 " ", *(int32_t*)(data_ptr + index));
+                        returnStr << fmt::sprintf("%" PRId32 " ", *(int32_t*)(data_ptr + index));
                     }
                     break;
                 case TYPE_FLOAT:
-                    returnStr.appendFormat("%0.8f ", *(float*)(data_ptr + index));
+                    returnStr << fmt::sprintf("%0.8f ", *(float*)(data_ptr + index));
                     break;
                 case TYPE_INT64:
-                    returnStr.appendFormat("%" PRId64 " ", *(int64_t*)(data_ptr + index));
+                    returnStr << fmt::sprintf("%" PRId64 " ", *(int64_t*)(data_ptr + index));
                     break;
                 case TYPE_DOUBLE:
-                    returnStr.appendFormat("%0.8f ", *(double*)(data_ptr + index));
+                    returnStr << fmt::sprintf("%0.8f ", *(double*)(data_ptr + index));
                     break;
                 case TYPE_RATIONAL: {
                     int32_t numerator = *(int32_t*)(data_ptr + index);
                     int32_t denominator = *(int32_t*)(data_ptr + index + 4);
-                    returnStr.appendFormat("(%d / %d) ", numerator, denominator);
+                    returnStr << fmt::sprintf("(%d / %d) ", numerator, denominator);
                     break;
                 }
                 default:
-                    returnStr += "??? ";
+                    returnStr << "??? ";
             }
         }
-        returnStr += "]\n";
+        returnStr << "]\n";
     }
-    return returnStr;
+    return returnStr.str();
 }
 
 template<typename T>
diff --git a/services/camera/libcameraservice/utils/TagMonitor.h b/services/camera/libcameraservice/utils/TagMonitor.h
index 9ded15d..bff4de0 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.h
+++ b/services/camera/libcameraservice/utils/TagMonitor.h
@@ -21,9 +21,9 @@
 #include <atomic>
 #include <mutex>
 #include <unordered_map>
+#include <string>
 
 #include <utils/RefBase.h>
-#include <utils/String8.h>
 #include <utils/Timers.h>
 
 #include <media/RingBuffer.h>
@@ -42,7 +42,7 @@
   public:
 
     // Monitor argument
-    static const String16 kMonitorOption;
+    static const std::string kMonitorOption;
 
     enum eventSource {
         REQUEST,
@@ -59,7 +59,7 @@
     // If invalid, do nothing.
     // Recognizes "3a" as a shortcut for enabling tracking 3A state, mode, and
     // triggers
-    void parseTagsToMonitor(String8 tagNames);
+    void parseTagsToMonitor(std::string tagNames);
 
     // Disable monitoring; does not clear the event log
     void disableMonitoring();
@@ -85,8 +85,8 @@
     // function.
     void dumpMonitoredTagEventsToVectorLocked(std::vector<std::string> &out);
 
-    static String8 getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type, int count,
-                                      int indentation);
+    static std::string getEventDataString(const uint8_t* data_ptr, uint32_t tag, int type,
+            int count, int indentation);
 
     void monitorSingleMetadata(TagMonitor::eventSource source, int64_t frameNumber,
             nsecs_t timestamp, const std::string& cameraId, uint32_t tag,
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index 59d1ae4..bd4ac38 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -242,6 +242,7 @@
     "channel_count_hardware",
     "sample_rate_hardware",
     "uid",
+    "sample_rate_client",
 };
 
 static constexpr const char * HeadTrackerDeviceEnabledFields[] {
@@ -1379,6 +1380,10 @@
 
     const auto uid = item->getUid();
 
+    int32_t sampleRateClient = 0;
+    mAudioAnalytics.mAnalyticsState->timeMachine().get(
+            key, AMEDIAMETRICS_PROP_SAMPLERATECLIENT, &sampleRateClient);
+
     LOG(LOG_LEVEL) << "key:" << key
             << " path:" << path
             << " direction:" << direction << "(" << directionStr << ")"
@@ -1402,7 +1407,8 @@
             << " format_hardware:" << formatHardware << "(" << formatHardwareStr << ")"
             << " channel_count_hardware:" << channelCountHardware
             << " sample_rate_hardware: " << sampleRateHardware
-            << " uid: " << uid;
+            << " uid: " << uid
+            << " sample_rate_client: " << sampleRateClient;
 
     if (mAudioAnalytics.mDeliverStatistics) {
         const stats::media_metrics::BytesField bf_serialized(
@@ -1431,6 +1437,7 @@
                 , channelCountHardware
                 , sampleRateHardware
                 , uid
+                , sampleRateClient
                 );
         std::stringstream ss;
         ss << "result:" << result;
@@ -1458,6 +1465,7 @@
                 , channelCountHardware
                 , sampleRateHardware
                 , uid
+                , sampleRateClient
                 );
         ss << " " << fieldsStr;
         std::string str = ss.str();
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index d98974f..f38a085 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -27,21 +27,18 @@
     default_applicable_licenses: ["frameworks_av_license"],
 }
 
-cc_fuzz {
-    name: "mediaresourcemanager_fuzzer",
-    srcs: [
-        "mediaresourcemanager_fuzzer.cpp",
+cc_defaults {
+    name: "mediaresourcemanager_fuzzer_defaults",
+    defaults: [
+        "service_fuzzer_defaults",
     ],
     static_libs: [
         "liblog",
         "libresourcemanagerservice",
     ],
     shared_libs: [
-        "libbinder",
-        "libbinder_ndk",
         "libmedia",
         "libmediautils",
-        "libutils",
         "libstats_media_metrics",
         "libstatspull",
         "libstatssocket",
@@ -54,3 +51,39 @@
         componentid: 155276,
     },
 }
+
+cc_fuzz {
+    name: "mediaresourcemanager_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "mediaresourcemanager_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourcemanager_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    srcs: [
+        "resourcemanager_service_fuzzer.cpp",
+    ],
+}
+
+cc_fuzz {
+    name: "resourceobserver_service_fuzzer",
+    defaults: [
+        "mediaresourcemanager_fuzzer_defaults",
+    ],
+    static_libs: [
+        "resourceobserver_aidl_interface-V1-ndk",
+    ],
+    srcs: [
+        "resourceobserver_service_fuzzer.cpp",
+    ],
+    fuzz_config: {
+        triage_assignee: "waghpawan@google.com",
+    },
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
new file mode 100644
index 0000000..ca10d20
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceManagerService.h"
+
+using android::fuzzService;
+using android::ResourceManagerService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceManagerService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
new file mode 100644
index 0000000..e69368d
--- /dev/null
+++ b/services/mediaresourcemanager/fuzzer/resourceobserver_service_fuzzer.cpp
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/binder_interface_utils.h>
+
+#include <fuzzbinder/libbinder_ndk_driver.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "ResourceObserverService.h"
+
+using android::fuzzService;
+using android::ResourceObserverService;
+using ndk::SharedRefBase;
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    auto service = SharedRefBase::make<ResourceObserverService>();
+    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
+    return 0;
+}
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 7f228c7..2ef6fe5 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -45,6 +45,8 @@
 #define OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS  (3 * AAUDIO_NANOS_PER_MILLISECOND)
 #define INPUT_ESTIMATED_HARDWARE_OFFSET_NANOS   (-1 * AAUDIO_NANOS_PER_MILLISECOND)
 
+#define AAUDIO_MAX_OPEN_ATTEMPTS    10
+
 using namespace android;  // TODO just import names needed
 using namespace aaudio;   // TODO just import names needed
 
@@ -77,14 +79,23 @@
         {AUDIO_FORMAT_PCM_24_BIT_PACKED, AUDIO_FORMAT_PCM_16_BIT}
 };
 
-audio_format_t getNextFormatToTry(audio_format_t curFormat, audio_format_t returnedFromAPM) {
-    if (returnedFromAPM != AUDIO_FORMAT_DEFAULT) {
-        return returnedFromAPM;
-    }
+audio_format_t getNextFormatToTry(audio_format_t curFormat) {
     const auto it = NEXT_FORMAT_TO_TRY.find(curFormat);
-    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : AUDIO_FORMAT_DEFAULT;
+    return it != NEXT_FORMAT_TO_TRY.end() ? it->second : curFormat;
 }
 
+struct configComp {
+    bool operator() (const audio_config_base_t& lhs, const audio_config_base_t& rhs) const {
+        if (lhs.sample_rate != rhs.sample_rate) {
+            return lhs.sample_rate < rhs.sample_rate;
+        } else if (lhs.channel_mask != rhs.channel_mask) {
+            return lhs.channel_mask < rhs.channel_mask;
+        } else {
+            return lhs.format < rhs.format;
+        }
+    }
+};
+
 } // namespace
 
 aaudio_result_t AAudioServiceEndpointMMAP::open(const aaudio::AAudioStreamRequest &request) {
@@ -101,60 +112,66 @@
         legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
 
     audio_format_t audioFormat = getFormat();
-    std::set<audio_format_t> formatsTried;
-    while (true) {
-        if (formatsTried.find(audioFormat) != formatsTried.end()) {
+    int32_t sampleRate = getSampleRate();
+    if (sampleRate == AAUDIO_UNSPECIFIED) {
+        sampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
+    }
+
+    const aaudio_direction_t direction = getDirection();
+    audio_config_base_t config;
+    config.format = audioFormat;
+    config.sample_rate = sampleRate;
+    config.channel_mask = AAudio_getChannelMaskForOpen(
+            getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
+
+    std::set<audio_config_base_t, configComp> configsTried;
+    int32_t numberOfAttempts = 0;
+    while (numberOfAttempts < AAUDIO_MAX_OPEN_ATTEMPTS) {
+        if (configsTried.find(config) != configsTried.end()) {
             // APM returned something that has already been tried.
-            ALOGW("Have already tried to open with format=%#x, but failed before", audioFormat);
+            ALOGW("Have already tried to open with format=%#x and sr=%d, but failed before",
+                  config.format, config.sample_rate);
             break;
         }
-        formatsTried.insert(audioFormat);
+        configsTried.insert(config);
 
-        audio_format_t nextFormatToTry = AUDIO_FORMAT_DEFAULT;
-        result = openWithFormat(audioFormat, &nextFormatToTry);
+        audio_config_base_t previousConfig = config;
+        result = openWithConfig(&config);
         if (result != AAUDIO_ERROR_UNAVAILABLE) {
             // Return if it is successful or if an error other than
             // AAUDIO_ERROR_UNAVAILABLE occurs.
-            ALOGI("Opened format=%#x with result=%d", audioFormat, result);
+            ALOGI("Opened format=%#x sr=%d, with result=%d", previousConfig.format,
+                    previousConfig.sample_rate, result);
             break;
         }
 
-        nextFormatToTry = getNextFormatToTry(audioFormat, nextFormatToTry);
-        ALOGD("%s() %#x failed, perhaps due to format. Try again with %#x",
-              __func__, audioFormat, nextFormatToTry);
-        audioFormat = nextFormatToTry;
-        if (audioFormat == AUDIO_FORMAT_DEFAULT) {
-            // Nothing else to try
-            break;
+        // Try other formats if the config from APM is the same as our current config.
+        // Some HALs may report their format support incorrectly.
+        if ((previousConfig.format == config.format) &&
+                (previousConfig.sample_rate == config.sample_rate)) {
+            config.format = getNextFormatToTry(config.format);
         }
+
+        ALOGD("%s() %#x %d failed, perhaps due to format or sample rate. Try again with %#x %d",
+                __func__, previousConfig.format, previousConfig.sample_rate, config.format,
+                config.sample_rate);
+        numberOfAttempts++;
     }
     return result;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::openWithFormat(
-        audio_format_t audioFormat, audio_format_t* nextFormatToTry) {
+aaudio_result_t AAudioServiceEndpointMMAP::openWithConfig(
+        audio_config_base_t* config) {
     aaudio_result_t result = AAUDIO_OK;
-    audio_config_base_t config;
+    audio_config_base_t currentConfig = *config;
     audio_port_handle_t deviceId;
 
     const audio_attributes_t attributes = getAudioAttributesFrom(this);
 
     deviceId = mRequestedDeviceId;
 
-    // Fill in config
-    config.format = audioFormat;
-
-    int32_t aaudioSampleRate = getSampleRate();
-    if (aaudioSampleRate == AAUDIO_UNSPECIFIED) {
-        aaudioSampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
-    }
-    config.sample_rate = aaudioSampleRate;
-
     const aaudio_direction_t direction = getDirection();
 
-    config.channel_mask = AAudio_getChannelMaskForOpen(
-            getChannelMask(), getSamplesPerFrame(), direction == AAUDIO_DIRECTION_INPUT);
-
     if (direction == AAUDIO_DIRECTION_OUTPUT) {
         mHardwareTimeOffsetNanos = OUTPUT_ESTIMATED_HARDWARE_OFFSET_NANOS; // frames at DAC later
 
@@ -177,11 +194,11 @@
     // Open HAL stream. Set mMmapStream
     ALOGD("%s trying to open MMAP stream with format=%#x, "
           "sample_rate=%u, channel_mask=%#x, device=%d",
-          __func__, config.format, config.sample_rate,
-          config.channel_mask, deviceId);
+          __func__, config->format, config->sample_rate,
+          config->channel_mask, deviceId);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                                 &attributes,
-                                                                &config,
+                                                                config,
                                                                 mMmapClient,
                                                                 &deviceId,
                                                                 &sessionId,
@@ -195,9 +212,9 @@
         // not match the hardware.
         ALOGD("%s() - openMmapStream() returned status=%d, suggested format=%#x, sample_rate=%u, "
               "channel_mask=%#x",
-              __func__, status, config.format, config.sample_rate, config.channel_mask);
-        *nextFormatToTry = config.format != audioFormat ? config.format
-                                                        : *nextFormatToTry;
+              __func__, status, config->format, config->sample_rate, config->channel_mask);
+        // Keep the channel mask of the current config
+        config->channel_mask = currentConfig.channel_mask;
         return AAUDIO_ERROR_UNAVAILABLE;
     }
 
@@ -217,7 +234,7 @@
     setSessionId(actualSessionId);
 
     ALOGD("%s(format = 0x%X) deviceId = %d, sessionId = %d",
-          __func__, audioFormat, getDeviceId(), getSessionId());
+          __func__, config->format, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
     result = createMmapBuffer();
@@ -227,11 +244,11 @@
 
     // Get information about the stream and pass it back to the caller.
     setChannelMask(AAudioConvert_androidToAAudioChannelMask(
-            config.channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
-            AAudio_isChannelIndexMask(config.channel_mask)));
+            config->channel_mask, getDirection() == AAUDIO_DIRECTION_INPUT,
+            AAudio_isChannelIndexMask(config->channel_mask)));
 
-    setFormat(config.format);
-    setSampleRate(config.sample_rate);
+    setFormat(config->format);
+    setSampleRate(config->sample_rate);
     setHardwareSampleRate(getSampleRate());
     setHardwareFormat(getFormat());
     setHardwareSamplesPerFrame(AAudioConvert_channelMaskToCount(getChannelMask()));
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index 38cf0ba..f19005c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -97,7 +97,7 @@
 
 private:
 
-    aaudio_result_t openWithFormat(audio_format_t audioFormat, audio_format_t* nextFormatToTry);
+    aaudio_result_t openWithConfig(audio_config_base_t* config);
 
     aaudio_result_t createMmapBuffer();
 
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index bc7ccde..8f51ce4 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -220,18 +220,6 @@
         return mSuspended;
     }
 
-    bool isCloseNeeded() const {
-        return mCloseNeeded.load();
-    }
-
-    /**
-     * Mark this stream as needing to be closed.
-     * Once marked for closing, it cannot be unmarked.
-     */
-    void markCloseNeeded() {
-        mCloseNeeded.store(true);
-    }
-
     virtual const char *getTypeText() const { return "Base"; }
 
 protected:
@@ -419,12 +407,8 @@
     aaudio_handle_t         mHandle = -1;
     bool                    mFlowing = false;
 
-    // This indicates that a stream that is being referenced by a binder call
-    // and needs to closed.
-    std::atomic<bool>       mCloseNeeded{false}; // TODO remove
-
     // This indicates that a running stream should not be processed because of an error,
-    // for example a full message queue. Note that this atomic is unrelated to mCloseNeeded.
+    // for example a full message queue.
     std::atomic<bool>       mSuspended{false};
 
     bool                    mDisconnected GUARDED_BY(mLock) {false};
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index 549fa59..502d773 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -75,7 +75,9 @@
 
 aaudio_result_t AAudioThread::stop() {
     if (!mHasThread) {
-        ALOGE("stop() but no thread running");
+        // There are cases where the thread has been created but not yet started.
+        // Log a warning to draw attention without treating it as a serious error.
+        ALOGW("stop() but no thread running");
         return AAUDIO_ERROR_INVALID_STATE;
     }
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index c5080a4..3521979 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -78,12 +78,38 @@
 
 ]
 
+cc_defaults {
+    name: "libaaudioservice_dependencies",
 
-cc_library {
+    shared_libs: [
+        "libaaudio_internal",
+        "libaudioclient",
+        "libaudioutils",
+        "libmedia_helper",
+        "libmediametrics",
+        "libmediautils",
+        "libbase",
+        "libbinder",
+        "libcutils",
+        "liblog",
+        "libutils",
+        "aaudio-aidl-cpp",
+        "framework-permission-aidl-cpp",
+        "libaudioclient_aidl_conversion",
+        "packagemanager_aidl-cpp",
+    ],
+
+    static_libs: [
+        "libaudioflinger",
+    ],
+}
+
+cc_library_static {
 
     name: "libaaudioservice",
 
     defaults: [
+        "libaaudioservice_dependencies",
         "latest_android_media_audio_common_types_cpp_shared",
     ],
 
@@ -116,25 +142,6 @@
         "-Werror",
     ],
 
-    shared_libs: [
-        "libaaudio_internal",
-        "libaudioclient",
-        "libaudioflinger",
-        "libaudioutils",
-        "libmedia_helper",
-        "libmediametrics",
-        "libmediautils",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "liblog",
-        "libutils",
-        "aaudio-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
-        "packagemanager_aidl-cpp",
-    ],
-
     export_shared_lib_headers: [
         "libaaudio_internal",
         "framework-permission-aidl-cpp",
diff --git a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
index f047065..f5c2e6c 100644
--- a/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
+++ b/services/oboeservice/fuzzer/oboeservice_fuzzer.cpp
@@ -403,13 +403,6 @@
 
     request.getConfiguration().setBufferCapacity(fdp.ConsumeIntegral<int32_t>());
 
-    request.getConfiguration().setHardwareSampleRate(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareSamplesPerFrame(fdp.ConsumeIntegral<int32_t>());
-    request.getConfiguration().setHardwareFormat((audio_format_t)(
-        fdp.ConsumeBool()
-            ? fdp.ConsumeIntegral<int32_t>()
-            : kAAudioFormats[fdp.ConsumeIntegralInRange<int32_t>(0, kNumAAudioFormats - 1)]));
-
     auto streamHandleInfo = mClient->openStream(request, configurationOutput);
     if (streamHandleInfo.getHandle() < 0) {
         // invalid request, stream not opened.
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index ea5139d..e29d520 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -86,6 +86,7 @@
         "android.hardware.tv.tuner@1.1",
         "android.hardware.tv.tuner-V2-ndk",
         "libbase",
+        "libcutils",
         "libbinder",
         "libfmq",
         "libhidlbase",
diff --git a/services/tuner/main_tunerservice.cpp b/services/tuner/main_tunerservice.cpp
index 90f1731..acfaf8a 100644
--- a/services/tuner/main_tunerservice.cpp
+++ b/services/tuner/main_tunerservice.cpp
@@ -17,6 +17,7 @@
 #include <android-base/logging.h>
 #include <binder/IPCThreadState.h>
 #include <binder/IServiceManager.h>
+#include <cutils/properties.h>
 #include <utils/Log.h>
 #include <hidl/HidlTransportSupport.h>
 
@@ -31,6 +32,11 @@
 int main() {
     ALOGD("Tuner service starting");
 
+    if (!property_get_bool("tuner.server.enable", false)) {
+        ALOGD("tuner is not enabled, terminating");
+        return 0;
+    }
+
     sp<ProcessState> proc(ProcessState::self());
     sp<IServiceManager> sm = defaultServiceManager();
     hardware::configureRpcThreadpool(16, true);