cameraservice: Migrate all internal String8/String16s to std::string

String8 and String16 are deprecated classes. It is recommended to use
std::string or std::u16string wherever possible. String16 is the native
string class for AIDL, but strings marked @utf8InCpp can use std::string
directly.
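
As an illustration (hypothetical interface, not part of this patch), the
AIDL compiler maps the two string flavors to different C++ parameter
types:

    // ICameraExample.aidl (hypothetical)
    interface ICameraExample {
        void setCameraId(String id);                  // -> android::String16
        void setPackageName(@utf8InCpp String name);  // -> std::string
    }

    // Generated C++ signatures (sketch):
    // binder::Status setCameraId(const android::String16& id);
    // binder::Status setPackageName(const std::string& name);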

This patch standardizes libcameraservice's internal strings on
std::string, which is capable of storing UTF-8 text. This makes the
code more readable and helps keep the number of string copies to a
minimum.
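
A representative before/after from this patch: thread names that were
built with String8::format are now composed with std::string:

    // Before:
    String8 threadName = String8::format("C2-%d-FrameProc", mCameraId);
    mFrameProcessor->run(threadName.string());

    // After:
    std::string threadName = std::string("C2-") + std::to_string(mCameraId);
    mFrameProcessor->run((threadName + "-FrameProc").c_str());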

A new set of string utils is added to frameworks/av/camera to aid this
migration.
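
These live in camera/StringUtils.h (see the new include below). As a
rough sketch, conversion helpers of this kind look like the following;
the names and signatures here are illustrative rather than a listing of
the actual header:

    #include <string>
    #include <utils/String8.h>
    #include <utils/String16.h>

    // Illustrative conversion helpers (hypothetical names):
    inline android::String8 toString8(const std::string& str) {
        return android::String8(str.c_str());
    }
    inline android::String16 toString16(const std::string& str) {
        return android::String16(str.c_str());
    }
    inline std::string toStdString(const android::String8& str) {
        return std::string(str.c_str());
    }
    inline std::string toStdString(const android::String16& str) {
        // String8's String16 constructor performs the UTF-16 -> UTF-8 conversion.
        return std::string(android::String8(str).c_str());
    }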

Bug: 265487852
Test: Presubmit, ran CtsCameraTestCases on Cuttlefish, ran adb shell dumpsys media.camera and observed output
Change-Id: I2b258a8636030dc4b7751140db43981b39c64f0d
Merged-In: I59330ac03c8a52b6c21a2388bba0c143e68af4cf
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index ad32682..68c28e9 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -18,11 +18,14 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <sstream>
+
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
 #include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
@@ -52,9 +55,9 @@
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         const sp<hardware::ICameraClient>& cameraClient,
-        const String16& clientPackageName,
-        const std::optional<String16>& clientFeatureId,
-        const String8& cameraDeviceId,
+        const std::string& clientPackageName,
+        const std::optional<std::string>& clientFeatureId,
+        const std::string& cameraDeviceId,
         int api1CameraId,
         int cameraFacing,
         int sensorOrientation,
@@ -84,7 +87,8 @@
     }
 }
 
-status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
+status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
+        const std::string& monitorTags) {
     return initializeImpl(manager, monitorTags);
 }
 
@@ -104,7 +108,7 @@
 }
 
 template<typename TProviderPtr>
-status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
+status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags)
 {
     ATRACE_CALL();
     ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
@@ -136,37 +140,23 @@
     CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
             &mRotateAndCropPreviewTransform);
 
-    String8 threadName;
-
     mStreamingProcessor = new StreamingProcessor(this);
-    threadName = String8::format("C2-%d-StreamProc",
-            mCameraId);
 
+    std::string threadName = std::string("C2-") + std::to_string(mCameraId);
     mFrameProcessor = new FrameProcessor(mDevice, this);
-    threadName = String8::format("C2-%d-FrameProc",
-            mCameraId);
-    mFrameProcessor->run(threadName.string());
+    mFrameProcessor->run((threadName + "-FrameProc").c_str());
 
     mCaptureSequencer = new CaptureSequencer(this);
-    threadName = String8::format("C2-%d-CaptureSeq",
-            mCameraId);
-    mCaptureSequencer->run(threadName.string());
+    mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
 
     mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
-    threadName = String8::format("C2-%d-JpegProc",
-            mCameraId);
-    mJpegProcessor->run(threadName.string());
+    mJpegProcessor->run((threadName + "-JpegProc").c_str());
 
     mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
-
-    threadName = String8::format("C2-%d-ZslProc",
-            mCameraId);
-    mZslProcessor->run(threadName.string());
+    mZslProcessor->run((threadName + "-ZslProc").c_str());
 
     mCallbackProcessor = new CallbackProcessor(this);
-    threadName = String8::format("C2-%d-CallbkProc",
-            mCameraId);
-    mCallbackProcessor->run(threadName.string());
+    mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
 
     if (gLogLevel >= 1) {
         SharedParameters::Lock l(mParameters);
@@ -194,47 +184,47 @@
 }
 
 status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
-    String8 result;
-    result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
+    std::ostringstream result;
+    result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
             (getRemoteCallback() != NULL ?
-                    (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
+                    (void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
             mClientPid);
-    result.append("  State: ");
-#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
+    result << "  State: ";
+#define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
 
     const Parameters& p = mParameters.unsafeAccess();
 
-    result.append(Parameters::getStateName(p.state));
+    result << Parameters::getStateName(p.state);
 
-    result.append("\n  Current parameters:\n");
-    result.appendFormat("    Preview size: %d x %d\n",
+    result << "\n  Current parameters:\n";
+    result << fmt::sprintf("    Preview size: %d x %d\n",
             p.previewWidth, p.previewHeight);
-    result.appendFormat("    Preview FPS range: %d - %d\n",
+    result << fmt::sprintf("    Preview FPS range: %d - %d\n",
             p.previewFpsRange[0], p.previewFpsRange[1]);
-    result.appendFormat("    Preview HAL pixel format: 0x%x\n",
+    result << fmt::sprintf("    Preview HAL pixel format: 0x%x\n",
             p.previewFormat);
-    result.appendFormat("    Preview transform: %x\n",
+    result << fmt::sprintf("    Preview transform: %x\n",
             p.previewTransform);
-    result.appendFormat("    Picture size: %d x %d\n",
+    result << fmt::sprintf("    Picture size: %d x %d\n",
             p.pictureWidth, p.pictureHeight);
-    result.appendFormat("    Jpeg thumbnail size: %d x %d\n",
+    result << fmt::sprintf("    Jpeg thumbnail size: %d x %d\n",
             p.jpegThumbSize[0], p.jpegThumbSize[1]);
-    result.appendFormat("    Jpeg quality: %d, thumbnail quality: %d\n",
+    result << fmt::sprintf("    Jpeg quality: %d, thumbnail quality: %d\n",
             p.jpegQuality, p.jpegThumbQuality);
-    result.appendFormat("    Jpeg rotation: %d\n", p.jpegRotation);
-    result.appendFormat("    GPS tags %s\n",
+    result << fmt::sprintf("    Jpeg rotation: %d\n", p.jpegRotation);
+    result << fmt::sprintf("    GPS tags %s\n",
             p.gpsEnabled ? "enabled" : "disabled");
     if (p.gpsEnabled) {
-        result.appendFormat("    GPS lat x long x alt: %f x %f x %f\n",
+        result << fmt::sprintf("    GPS lat x long x alt: %f x %f x %f\n",
                 p.gpsCoordinates[0], p.gpsCoordinates[1],
                 p.gpsCoordinates[2]);
-        result.appendFormat("    GPS timestamp: %" PRId64 "\n",
+        result << fmt::sprintf("    GPS timestamp: %" PRId64 "\n",
                 p.gpsTimestamp);
-        result.appendFormat("    GPS processing method: %s\n",
+        result << fmt::sprintf("    GPS processing method: %s\n",
                 p.gpsProcessingMethod.string());
     }
 
-    result.append("    White balance mode: ");
+    result << "    White balance mode: ";
     switch (p.wbMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
@@ -244,10 +234,10 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Effect mode: ");
+    result << "    Effect mode: ";
     switch (p.effectMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
@@ -258,22 +248,22 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
         CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Antibanding mode: ");
+    result << "    Antibanding mode: ";
     switch (p.antibandingMode) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Scene mode: ");
+    result << "    Scene mode: ";
     switch (p.sceneMode) {
         case ANDROID_CONTROL_SCENE_MODE_DISABLED:
-            result.append("AUTO\n"); break;
+            result << "AUTO\n"; break;
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
@@ -290,10 +280,10 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
         CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Flash mode: ");
+    result << "    Flash mode: ";
     switch (p.flashMode) {
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
@@ -301,10 +291,10 @@
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
         CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Focus mode: ");
+    result << "    Focus mode: ";
     switch (p.focusMode) {
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
@@ -314,10 +304,10 @@
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
         CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("   Focus state: ");
+    result << "   Focus state: ";
     switch (p.focusState) {
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
@@ -326,12 +316,12 @@
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
         CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
-        default: result.append("UNKNOWN\n");
+        default: result << "UNKNOWN\n";
     }
 
-    result.append("    Focusing areas:\n");
+    result << "    Focusing areas:\n";
     for (size_t i = 0; i < p.focusingAreas.size(); i++) {
-        result.appendFormat("      [ (%d, %d, %d, %d), weight %d ]\n",
+        result << fmt::sprintf("      [ (%d, %d, %d, %d), weight %d ]\n",
                 p.focusingAreas[i].left,
                 p.focusingAreas[i].top,
                 p.focusingAreas[i].right,
@@ -339,16 +329,16 @@
                 p.focusingAreas[i].weight);
     }
 
-    result.appendFormat("    Exposure compensation index: %d\n",
+    result << fmt::sprintf("    Exposure compensation index: %d\n",
             p.exposureCompensation);
 
-    result.appendFormat("    AE lock %s, AWB lock %s\n",
+    result << fmt::sprintf("    AE lock %s, AWB lock %s\n",
             p.autoExposureLock ? "enabled" : "disabled",
             p.autoWhiteBalanceLock ? "enabled" : "disabled" );
 
-    result.appendFormat("    Metering areas:\n");
+    result << "    Metering areas:\n";
     for (size_t i = 0; i < p.meteringAreas.size(); i++) {
-        result.appendFormat("      [ (%d, %d, %d, %d), weight %d ]\n",
+        result << fmt::sprintf("      [ (%d, %d, %d, %d), weight %d ]\n",
                 p.meteringAreas[i].left,
                 p.meteringAreas[i].top,
                 p.meteringAreas[i].right,
@@ -356,54 +346,56 @@
                 p.meteringAreas[i].weight);
     }
 
-    result.appendFormat("    Zoom index: %d\n", p.zoom);
-    result.appendFormat("    Video size: %d x %d\n", p.videoWidth,
+    result << fmt::sprintf("    Zoom index: %d\n", p.zoom);
+    result << fmt::sprintf("    Video size: %d x %d\n", p.videoWidth,
             p.videoHeight);
 
-    result.appendFormat("    Recording hint is %s\n",
+    result << fmt::sprintf("    Recording hint is %s\n",
             p.recordingHint ? "set" : "not set");
 
-    result.appendFormat("    Video stabilization is %s\n",
+    result << fmt::sprintf("    Video stabilization is %s\n",
             p.videoStabilization ? "enabled" : "disabled");
 
-    result.appendFormat("    Selected still capture FPS range: %d - %d\n",
+    result << fmt::sprintf("    Selected still capture FPS range: %d - %d\n",
             p.fastInfo.bestStillCaptureFpsRange[0],
             p.fastInfo.bestStillCaptureFpsRange[1]);
 
-    result.appendFormat("    Use zero shutter lag: %s\n",
+    result << fmt::sprintf("    Use zero shutter lag: %s\n",
             p.useZeroShutterLag() ? "yes" : "no");
 
-    result.append("  Current streams:\n");
-    result.appendFormat("    Preview stream ID: %d\n",
+    result << "  Current streams:\n";
+    result << fmt::sprintf("    Preview stream ID: %d\n",
             getPreviewStreamId());
-    result.appendFormat("    Capture stream ID: %d\n",
+    result << fmt::sprintf("    Capture stream ID: %d\n",
             getCaptureStreamId());
-    result.appendFormat("    Recording stream ID: %d\n",
+    result << fmt::sprintf("    Recording stream ID: %d\n",
             getRecordingStreamId());
 
-    result.append("  Quirks for this camera:\n");
+    result << "  Quirks for this camera:\n";
     bool haveQuirk = false;
     if (p.quirks.triggerAfWithAuto) {
-        result.appendFormat("    triggerAfWithAuto\n");
+        result << "    triggerAfWithAuto\n";
         haveQuirk = true;
     }
     if (p.quirks.useZslFormat) {
-        result.appendFormat("    useZslFormat\n");
+        result << "    useZslFormat\n";
         haveQuirk = true;
     }
     if (p.quirks.meteringCropRegion) {
-        result.appendFormat("    meteringCropRegion\n");
+        result << "    meteringCropRegion\n";
         haveQuirk = true;
     }
     if (p.quirks.partialResults) {
-        result.appendFormat("    usePartialResult\n");
+        result << "    usePartialResult\n";
         haveQuirk = true;
     }
     if (!haveQuirk) {
-        result.appendFormat("    none\n");
+        result << "    none\n";
     }
 
-    write(fd, result.string(), result.size());
+    std::string resultStr = result.str();
+
+    write(fd, resultStr.c_str(), resultStr.size());
 
     mStreamingProcessor->dump(fd, args);