cameraservice: Migrate all internal String8/String16s to std::string
String8 and String16 are deprecated classes. It is recommended to use
std::string or std::u16string wherever possible. String16 is the native
string class for aidl, but Strings marked @utf8InCpp can use std::string
directly.
This patch standardizes libcameraservice's use of strings to
std::string, which is capable of storing utf-8 strings. This makes the
code more readable and potentially reduces the number of string copies
to a minimum.
A new set of string utils is added to frameworks/av/camera to aid this
migration.
Change-Id: I59330ac03c8a52b6c21a2388bba0c143e68af4cf
Merged-In: I59330ac03c8a52b6c21a2388bba0c143e68af4cf
Bug: 265487852
Test: Presubmit, ran CtsCameraTestCases on Cuttlefish, adb shell dumpsys media camera and observed output
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 5b5892a..87a4420 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -18,11 +18,14 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <sstream>
+
#include <inttypes.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <camera/CameraUtils.h>
+#include <camera/StringUtils.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
@@ -53,9 +56,9 @@
Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- const String16& clientPackageName,
- const std::optional<String16>& clientFeatureId,
- const String8& cameraDeviceId,
+ const std::string& clientPackageName,
+ const std::optional<std::string>& clientFeatureId,
+ const std::string& cameraDeviceId,
int api1CameraId,
int cameraFacing,
int sensorOrientation,
@@ -85,7 +88,8 @@
l.mParameters.isSlowJpegModeForced = forceSlowJpegMode;
}
-status_t Camera2Client::initialize(sp<CameraProviderManager> manager, const String8& monitorTags) {
+status_t Camera2Client::initialize(sp<CameraProviderManager> manager,
+ const std::string& monitorTags) {
return initializeImpl(manager, monitorTags);
}
@@ -105,7 +109,7 @@
}
template<typename TProviderPtr>
-status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8& monitorTags)
+status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags)
{
ATRACE_CALL();
ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
@@ -137,16 +141,11 @@
CameraUtils::getRotationTransform(staticInfo, OutputConfiguration::MIRROR_MODE_AUTO,
&mRotateAndCropPreviewTransform);
- String8 threadName;
-
mStreamingProcessor = new StreamingProcessor(this);
- threadName = String8::format("C2-%d-StreamProc",
- mCameraId);
+ std::string threadName = std::string("C2-") + std::to_string(mCameraId);
mFrameProcessor = new FrameProcessor(mDevice, this);
- threadName = String8::format("C2-%d-FrameProc",
- mCameraId);
- res = mFrameProcessor->run(threadName.string());
+ res = mFrameProcessor->run((threadName + "-FrameProc").c_str());
if (res != OK) {
ALOGE("%s: Unable to start frame processor thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -154,9 +153,7 @@
}
mCaptureSequencer = new CaptureSequencer(this);
- threadName = String8::format("C2-%d-CaptureSeq",
- mCameraId);
- res = mCaptureSequencer->run(threadName.string());
+ res = mCaptureSequencer->run((threadName + "-CaptureSeq").c_str());
if (res != OK) {
ALOGE("%s: Unable to start capture sequencer thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -164,9 +161,7 @@
}
mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
- threadName = String8::format("C2-%d-JpegProc",
- mCameraId);
- res = mJpegProcessor->run(threadName.string());
+ res = mJpegProcessor->run((threadName + "-JpegProc").c_str());
if (res != OK) {
ALOGE("%s: Unable to start jpeg processor thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -174,10 +169,7 @@
}
mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
-
- threadName = String8::format("C2-%d-ZslProc",
- mCameraId);
- res = mZslProcessor->run(threadName.string());
+ res = mZslProcessor->run((threadName + "-ZslProc").c_str());
if (res != OK) {
ALOGE("%s: Unable to start zsl processor thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -185,9 +177,7 @@
}
mCallbackProcessor = new CallbackProcessor(this);
- threadName = String8::format("C2-%d-CallbkProc",
- mCameraId);
- res = mCallbackProcessor->run(threadName.string());
+ res = mCallbackProcessor->run((threadName + "-CallbkProc").c_str());
if (res != OK) {
ALOGE("%s: Unable to start callback processor thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
@@ -220,47 +210,47 @@
}
status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
- String8 result;
- result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
+ std::ostringstream result;
+ result << fmt::sprintf("Client2[%d] (%p) PID: %d, dump:\n", mCameraId,
(getRemoteCallback() != NULL ?
- (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
+ (void *) (IInterface::asBinder(getRemoteCallback()).get()) : NULL),
mClientPid);
- result.append(" State: ");
-#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break;
+ result << " State: ";
+#define CASE_APPEND_ENUM(x) case x: result << #x "\n"; break;
const Parameters& p = mParameters.unsafeAccess();
- result.append(Parameters::getStateName(p.state));
+ result << Parameters::getStateName(p.state);
- result.append("\n Current parameters:\n");
- result.appendFormat(" Preview size: %d x %d\n",
+ result << "\n Current parameters:\n";
+ result << fmt::sprintf(" Preview size: %d x %d\n",
p.previewWidth, p.previewHeight);
- result.appendFormat(" Preview FPS range: %d - %d\n",
+ result << fmt::sprintf(" Preview FPS range: %d - %d\n",
p.previewFpsRange[0], p.previewFpsRange[1]);
- result.appendFormat(" Preview HAL pixel format: 0x%x\n",
+ result << fmt::sprintf(" Preview HAL pixel format: 0x%x\n",
p.previewFormat);
- result.appendFormat(" Preview transform: %x\n",
+ result << fmt::sprintf(" Preview transform: %x\n",
p.previewTransform);
- result.appendFormat(" Picture size: %d x %d\n",
+ result << fmt::sprintf(" Picture size: %d x %d\n",
p.pictureWidth, p.pictureHeight);
- result.appendFormat(" Jpeg thumbnail size: %d x %d\n",
+ result << fmt::sprintf(" Jpeg thumbnail size: %d x %d\n",
p.jpegThumbSize[0], p.jpegThumbSize[1]);
- result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n",
+ result << fmt::sprintf(" Jpeg quality: %d, thumbnail quality: %d\n",
p.jpegQuality, p.jpegThumbQuality);
- result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation);
- result.appendFormat(" GPS tags %s\n",
+ result << fmt::sprintf(" Jpeg rotation: %d\n", p.jpegRotation);
+ result << fmt::sprintf(" GPS tags %s\n",
p.gpsEnabled ? "enabled" : "disabled");
if (p.gpsEnabled) {
- result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n",
+ result << fmt::sprintf(" GPS lat x long x alt: %f x %f x %f\n",
p.gpsCoordinates[0], p.gpsCoordinates[1],
p.gpsCoordinates[2]);
- result.appendFormat(" GPS timestamp: %" PRId64 "\n",
+ result << fmt::sprintf(" GPS timestamp: %" PRId64 "\n",
p.gpsTimestamp);
- result.appendFormat(" GPS processing method: %s\n",
+ result << fmt::sprintf(" GPS processing method: %s\n",
p.gpsProcessingMethod.string());
}
- result.append(" White balance mode: ");
+ result << " White balance mode: ";
switch (p.wbMode) {
CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO)
CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT)
@@ -270,10 +260,10 @@
CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT)
CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT)
CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Effect mode: ");
+ result << " Effect mode: ";
switch (p.effectMode) {
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF)
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO)
@@ -284,22 +274,22 @@
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD)
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD)
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Antibanding mode: ");
+ result << " Antibanding mode: ";
switch (p.antibandingMode) {
CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO)
CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ)
CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Scene mode: ");
+ result << " Scene mode: ";
switch (p.sceneMode) {
case ANDROID_CONTROL_SCENE_MODE_DISABLED:
- result.append("AUTO\n"); break;
+ result << "AUTO\n"; break;
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
@@ -316,10 +306,10 @@
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Flash mode: ");
+ result << " Flash mode: ";
switch (p.flashMode) {
CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF)
CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO)
@@ -327,10 +317,10 @@
CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH)
CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE)
CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Focus mode: ");
+ result << " Focus mode: ";
switch (p.focusMode) {
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO)
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO)
@@ -340,10 +330,10 @@
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY)
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED)
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Focus state: ");
+ result << " Focus state: ";
switch (p.focusState) {
CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE)
CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)
@@ -352,12 +342,12 @@
CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN)
CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED)
CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)
- default: result.append("UNKNOWN\n");
+ default: result << "UNKNOWN\n";
}
- result.append(" Focusing areas:\n");
+ result << " Focusing areas:\n";
for (size_t i = 0; i < p.focusingAreas.size(); i++) {
- result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
+ result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
p.focusingAreas[i].left,
p.focusingAreas[i].top,
p.focusingAreas[i].right,
@@ -365,16 +355,16 @@
p.focusingAreas[i].weight);
}
- result.appendFormat(" Exposure compensation index: %d\n",
+ result << fmt::sprintf(" Exposure compensation index: %d\n",
p.exposureCompensation);
- result.appendFormat(" AE lock %s, AWB lock %s\n",
+ result << fmt::sprintf(" AE lock %s, AWB lock %s\n",
p.autoExposureLock ? "enabled" : "disabled",
p.autoWhiteBalanceLock ? "enabled" : "disabled" );
- result.appendFormat(" Metering areas:\n");
+ result << " Metering areas:\n";
for (size_t i = 0; i < p.meteringAreas.size(); i++) {
- result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n",
+ result << fmt::sprintf(" [ (%d, %d, %d, %d), weight %d ]\n",
p.meteringAreas[i].left,
p.meteringAreas[i].top,
p.meteringAreas[i].right,
@@ -382,54 +372,56 @@
p.meteringAreas[i].weight);
}
- result.appendFormat(" Zoom index: %d\n", p.zoom);
- result.appendFormat(" Video size: %d x %d\n", p.videoWidth,
+ result << fmt::sprintf(" Zoom index: %d\n", p.zoom);
+ result << fmt::sprintf(" Video size: %d x %d\n", p.videoWidth,
p.videoHeight);
- result.appendFormat(" Recording hint is %s\n",
+ result << fmt::sprintf(" Recording hint is %s\n",
p.recordingHint ? "set" : "not set");
- result.appendFormat(" Video stabilization is %s\n",
+ result << fmt::sprintf(" Video stabilization is %s\n",
p.videoStabilization ? "enabled" : "disabled");
- result.appendFormat(" Selected still capture FPS range: %d - %d\n",
+ result << fmt::sprintf(" Selected still capture FPS range: %d - %d\n",
p.fastInfo.bestStillCaptureFpsRange[0],
p.fastInfo.bestStillCaptureFpsRange[1]);
- result.appendFormat(" Use zero shutter lag: %s\n",
+ result << fmt::sprintf(" Use zero shutter lag: %s\n",
p.useZeroShutterLag() ? "yes" : "no");
- result.append(" Current streams:\n");
- result.appendFormat(" Preview stream ID: %d\n",
+ result << " Current streams:\n";
+ result << fmt::sprintf(" Preview stream ID: %d\n",
getPreviewStreamId());
- result.appendFormat(" Capture stream ID: %d\n",
+ result << fmt::sprintf(" Capture stream ID: %d\n",
getCaptureStreamId());
- result.appendFormat(" Recording stream ID: %d\n",
+ result << fmt::sprintf(" Recording stream ID: %d\n",
getRecordingStreamId());
- result.append(" Quirks for this camera:\n");
+ result << " Quirks for this camera:\n";
bool haveQuirk = false;
if (p.quirks.triggerAfWithAuto) {
- result.appendFormat(" triggerAfWithAuto\n");
+ result << " triggerAfWithAuto\n";
haveQuirk = true;
}
if (p.quirks.useZslFormat) {
- result.appendFormat(" useZslFormat\n");
+ result << " useZslFormat\n";
haveQuirk = true;
}
if (p.quirks.meteringCropRegion) {
- result.appendFormat(" meteringCropRegion\n");
+ result << " meteringCropRegion\n";
haveQuirk = true;
}
if (p.quirks.partialResults) {
- result.appendFormat(" usePartialResult\n");
+ result << " usePartialResult\n";
haveQuirk = true;
}
if (!haveQuirk) {
- result.appendFormat(" none\n");
+ result << " none\n";
}
- write(fd, result.string(), result.size());
+    std::string resultStr = result.str();
+
+ write(fd, resultStr.c_str(), resultStr.size());
mStreamingProcessor->dump(fd, args);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index a7ea823..9ec1eb5 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -103,9 +103,9 @@
Camera2Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper,
- const String16& clientPackageName,
- const std::optional<String16>& clientFeatureId,
- const String8& cameraDeviceId,
+ const std::string& clientPackageName,
+ const std::optional<std::string>& clientFeatureId,
+ const std::string& cameraDeviceId,
int api1CameraId,
int cameraFacing,
int sensorOrientation,
@@ -119,7 +119,7 @@
virtual ~Camera2Client();
virtual status_t initialize(sp<CameraProviderManager> manager,
- const String8& monitorTags) override;
+ const std::string& monitorTags) override;
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -244,7 +244,7 @@
status_t overrideVideoSnapshotSize(Parameters ¶ms);
template<typename TProviderPtr>
- status_t initializeImpl(TProviderPtr providerPtr, const String8& monitorTags);
+ status_t initializeImpl(TProviderPtr providerPtr, const std::string& monitorTags);
bool isZslEnabledInStillTemplate();
// The current rotate & crop mode passed by camera service
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index ee764ec..17db20b 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -158,7 +158,7 @@
res = device->createStream(mCallbackWindow,
params.previewWidth, params.previewHeight, callbackFormat,
HAL_DATASPACE_V0_JFIF, CAMERA_STREAM_ROTATION_0, &mCallbackStreamId,
- String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+ std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index 4c9b7ed..0b5e03f 100644
--- a/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -23,6 +23,7 @@
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Vector.h>
+#include <camera/StringUtils.h>
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
@@ -174,19 +175,19 @@
void CaptureSequencer::dump(int fd, const Vector<String16>& /*args*/) {
- String8 result;
+ std::string result;
if (mCaptureRequest.entryCount() != 0) {
result = " Capture request:\n";
- write(fd, result.string(), result.size());
+ write(fd, result.c_str(), result.size());
mCaptureRequest.dump(fd, 2, 6);
} else {
result = " Capture request: undefined\n";
- write(fd, result.string(), result.size());
+ write(fd, result.c_str(), result.size());
}
- result = String8::format(" Current capture state: %s\n",
+ result = fmt::sprintf(" Current capture state: %s\n",
kStateNames[mCaptureState]);
- result.append(" Latest captured frame:\n");
- write(fd, result.string(), result.size());
+ result += " Latest captured frame:\n";
+ write(fd, result.c_str(), result.size());
mNewFrame.dump(fd, 2, 6);
}
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 467108d..eb00bf8 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -83,7 +83,7 @@
}
// Find out buffer size for JPEG
- ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(String8("")),
+ ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(""),
params.pictureWidth, params.pictureHeight);
if (maxJpegSize <= 0) {
ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
@@ -157,7 +157,7 @@
params.pictureWidth, params.pictureHeight,
HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
- String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+ std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for capture: "
"%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 2d3597c..ff71e6b 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -31,6 +31,7 @@
#include <gui/BufferItem.h>
#include <gui/Surface.h>
#include <media/hardware/HardwareAPI.h>
+#include <camera/StringUtils.h>
#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
@@ -198,7 +199,7 @@
res = device->createStream(mPreviewWindow,
params.previewWidth, params.previewHeight,
CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN,
- CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, String8(),
+ CAMERA_STREAM_ROTATION_0, &mPreviewStreamId, std::string(),
std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
@@ -385,7 +386,7 @@
params.videoWidth, params.videoHeight,
params.videoFormat, params.videoDataSpace,
CAMERA_STREAM_ROTATION_0, &mRecordingStreamId,
- String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+ std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
ALOGE("%s: Camera %d: Can't create output stream for recording: "
"%s (%d)", __FUNCTION__, mId,
@@ -585,21 +586,21 @@
}
status_t StreamingProcessor::dump(int fd, const Vector<String16>& /*args*/) {
- String8 result;
+ std::string result;
- result.append(" Current requests:\n");
+ result += " Current requests:\n";
if (mPreviewRequest.entryCount() != 0) {
- result.append(" Preview request:\n");
- write(fd, result.string(), result.size());
+ result += " Preview request:\n";
+ write(fd, result.c_str(), result.size());
mPreviewRequest.dump(fd, 2, 6);
result.clear();
} else {
- result.append(" Preview request: undefined\n");
+ result += " Preview request: undefined\n";
}
if (mRecordingRequest.entryCount() != 0) {
result = " Recording request:\n";
- write(fd, result.string(), result.size());
+ write(fd, result.c_str(), result.size());
mRecordingRequest.dump(fd, 2, 6);
result.clear();
} else {
@@ -609,11 +610,11 @@
const char* streamTypeString[] = {
"none", "preview", "record"
};
- result.append(String8::format(" Active request: %s (paused: %s)\n",
- streamTypeString[mActiveRequest],
- mPaused ? "yes" : "no"));
+    result += fmt::sprintf("    Active request: %s (paused: %s)\n",
+ streamTypeString[mActiveRequest],
+ mPaused ? "yes" : "no");
- write(fd, result.string(), result.size());
+ write(fd, result.c_str(), result.size());
return OK;
}
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 1321e6b..d6c2415 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -30,6 +30,7 @@
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>
+#include <camera/StringUtils.h>
#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
@@ -255,13 +256,13 @@
BufferQueue::createBufferQueue(&producer, &consumer);
mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
mBufferQueueDepth);
- mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
+ mProducer->setName("Camera2-ZslRingBufferConsumer");
sp<Surface> outSurface = new Surface(producer);
res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
- String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
+ std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
ALOGE("%s: Camera %d: Can't create ZSL stream: "
"%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -680,12 +681,12 @@
void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
Mutex::Autolock l(mInputMutex);
if (!mLatestCapturedRequest.isEmpty()) {
- String8 result(" Latest ZSL capture request:\n");
- write(fd, result.string(), result.size());
+ std::string result = " Latest ZSL capture request:\n";
+ write(fd, result.c_str(), result.size());
mLatestCapturedRequest.dump(fd, 2, 6);
} else {
- String8 result(" Latest ZSL capture request: none yet\n");
- write(fd, result.string(), result.size());
+ std::string result = " Latest ZSL capture request: none yet\n";
+ write(fd, result.c_str(), result.size());
}
dumpZslQueue(fd);
}
@@ -706,12 +707,12 @@
}
void ZslProcessor::dumpZslQueue(int fd) const {
- String8 header("ZSL queue contents:");
- String8 indent(" ");
- ALOGV("%s", header.string());
+ std::string header = "ZSL queue contents:";
+ std::string indent = " ";
+ ALOGV("%s", header.c_str());
if (fd != -1) {
header = indent + header + "\n";
- write(fd, header.string(), header.size());
+ write(fd, header.c_str(), header.size());
}
for (size_t i = 0; i < mZslQueue.size(); i++) {
const ZslPair &queueEntry = mZslQueue[i];
@@ -725,13 +726,13 @@
entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
if (entry.count > 0) frameAeState = entry.data.u8[0];
}
- String8 result =
- String8::format(" %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
+ std::string result =
+ fmt::sprintf(" %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
bufferTimestamp, frameTimestamp, frameAeState);
- ALOGV("%s", result.string());
+ ALOGV("%s", result.c_str());
if (fd != -1) {
result = indent + result + "\n";
- write(fd, result.string(), result.size());
+ write(fd, result.c_str(), result.size());
}
}