Merge "audiopolicy: add Volume Group introspection APIs"
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index de40990..359eaed 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -38,6 +38,8 @@
filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS);
filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS);
+ filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS);
+ filterDurations(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS);
}
// TODO: filter request/result keys
}
@@ -186,6 +188,16 @@
filteredDurations.push_back(duration);
}
break;
+ case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
+ case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ format = AIMAGE_FORMAT_DEPTH_JPEG;
+ filteredDurations.push_back(format);
+ filteredDurations.push_back(width);
+ filteredDurations.push_back(height);
+ filteredDurations.push_back(duration);
+ }
+ break;
default:
// Should not reach here
ALOGE("%s: Unkown tag 0x%x", __FUNCTION__, tag);
@@ -284,6 +296,32 @@
filteredHeicStreamConfigs.push_back(isInput);
}
mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
+
+ entry = mData.find(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS);
+ Vector<int32_t> filteredDynamicDepthStreamConfigs;
+ filteredDynamicDepthStreamConfigs.setCapacity(entry.count);
+
+ for (size_t i = 0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) {
+ int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_INPUT) {
+ // Hide input streams
+ continue;
+ }
+ // Translate HAL formats to NDK format
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ format = AIMAGE_FORMAT_DEPTH_JPEG;
+ }
+
+ filteredDynamicDepthStreamConfigs.push_back(format);
+ filteredDynamicDepthStreamConfigs.push_back(width);
+ filteredDynamicDepthStreamConfigs.push_back(height);
+ filteredDynamicDepthStreamConfigs.push_back(isInput);
+ }
+ mData.update(ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
+ filteredDynamicDepthStreamConfigs);
}
bool
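Illustrative sketch only, not part of this change: after the translation above, an NDK client sees ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS as {format, width, height, isInput} int32 tuples, with BLOB already mapped to AIMAGE_FORMAT_DEPTH_JPEG and input entries hidden. Assuming chars is a valid ACameraMetadata* obtained from ACameraManager_getCameraCharacteristics:

    #include <camera/NdkCameraMetadata.h>

    ACameraMetadata_const_entry entry = {};
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry);
    if (status == ACAMERA_OK) {
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            int32_t format  = entry.data.i32[i];      // AIMAGE_FORMAT_DEPTH_JPEG after translation
            int32_t width   = entry.data.i32[i + 1];
            int32_t height  = entry.data.i32[i + 2];
            int32_t isInput = entry.data.i32[i + 3];  // always an output entry after filtering
            (void)format; (void)width; (void)height; (void)isInput;
        }
    }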
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index dce3222..2d10c67 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -709,6 +709,49 @@
Mutexed<Config>::Locked config(mConfig);
config->mUsingSurface = surface != nullptr;
+ // Enforce required parameters
+ int32_t i32;
+ float flt;
+ if (config->mDomain & Config::IS_AUDIO) {
+ if (!msg->findInt32(KEY_SAMPLE_RATE, &i32)) {
+ ALOGD("sample rate is missing, which is required for audio components.");
+ return BAD_VALUE;
+ }
+ if (!msg->findInt32(KEY_CHANNEL_COUNT, &i32)) {
+ ALOGD("channel count is missing, which is required for audio components.");
+ return BAD_VALUE;
+ }
+ if ((config->mDomain & Config::IS_ENCODER)
+ && !mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_FLAC)
+ && !msg->findInt32(KEY_BIT_RATE, &i32)
+ && !msg->findFloat(KEY_BIT_RATE, &flt)) {
+ ALOGD("bitrate is missing, which is required for audio encoders.");
+ return BAD_VALUE;
+ }
+ }
+ if (config->mDomain & (Config::IS_IMAGE | Config::IS_VIDEO)) {
+ if (!msg->findInt32(KEY_WIDTH, &i32)) {
+ ALOGD("width is missing, which is required for image/video components.");
+ return BAD_VALUE;
+ }
+ if (!msg->findInt32(KEY_HEIGHT, &i32)) {
+ ALOGD("height is missing, which is required for image/video components.");
+ return BAD_VALUE;
+ }
+ if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
+ if (!msg->findInt32(KEY_BIT_RATE, &i32)
+ && !msg->findFloat(KEY_BIT_RATE, &flt)) {
+ ALOGD("bitrate is missing, which is required for video encoders.");
+ return BAD_VALUE;
+ }
+ if (!msg->findInt32(KEY_I_FRAME_INTERVAL, &i32)
+ && !msg->findFloat(KEY_I_FRAME_INTERVAL, &flt)) {
+ ALOGD("I frame interval is missing, which is required for video encoders.");
+ return BAD_VALUE;
+ }
+ }
+ }
+
/*
* Handle input surface configuration
*/
@@ -718,13 +761,14 @@
{
config->mISConfig->mMinFps = 0;
int64_t value;
- if (msg->findInt64("repeat-previous-frame-after", &value) && value > 0) {
+ if (msg->findInt64(KEY_REPEAT_PREVIOUS_FRAME_AFTER, &value) && value > 0) {
config->mISConfig->mMinFps = 1e6 / value;
}
- (void)msg->findFloat("max-fps-to-encoder", &config->mISConfig->mMaxFps);
+ (void)msg->findFloat(
+ KEY_MAX_FPS_TO_ENCODER, &config->mISConfig->mMaxFps);
config->mISConfig->mMinAdjustedFps = 0;
config->mISConfig->mFixedAdjustedFps = 0;
- if (msg->findInt64("max-pts-gap-to-encoder", &value)) {
+ if (msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &value)) {
if (value < 0 && value >= INT32_MIN) {
config->mISConfig->mFixedAdjustedFps = -1e6 / value;
} else if (value > 0 && value <= INT32_MAX) {
@@ -745,7 +789,7 @@
config->mISConfig->mSuspended = false;
config->mISConfig->mSuspendAtUs = -1;
int32_t value;
- if (msg->findInt32("create-input-buffers-suspended", &value) && value) {
+ if (msg->findInt32(KEY_CREATE_INPUT_SURFACE_SUSPENDED, &value) && value) {
config->mISConfig->mSuspended = true;
}
}
@@ -1453,7 +1497,7 @@
*/
if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))
&& (config->mDomain & Config::IS_ENCODER) && config->mInputSurface && config->mISConfig) {
- (void)params->findInt64("time-offset-us", &config->mISConfig->mTimeOffsetUs);
+ (void)params->findInt64(PARAMETER_KEY_OFFSET_TIME, &config->mISConfig->mTimeOffsetUs);
if (params->findInt64("skip-frames-before", &config->mISConfig->mStartAtUs)) {
config->mISConfig->mStopped = false;
@@ -1462,10 +1506,10 @@
}
int32_t value;
- if (params->findInt32("drop-input-frames", &value)) {
+ if (params->findInt32(PARAMETER_KEY_SUSPEND, &value)) {
config->mISConfig->mSuspended = value;
config->mISConfig->mSuspendAtUs = -1;
- (void)params->findInt64("drop-start-time-us", &config->mISConfig->mSuspendAtUs);
+ (void)params->findInt64(PARAMETER_KEY_SUSPEND_TIME, &config->mISConfig->mSuspendAtUs);
}
(void)config->mInputSurface->configure(*config->mISConfig);
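Illustrative sketch only, not part of this change: the parameter enforcement added above means configure() for a video encoder now returns BAD_VALUE unless width, height, bitrate and I-frame interval are present in the format. From an NDK client the enforced keys look roughly like this (values are placeholders):

    #include <media/NdkMediaCodec.h>
    #include <media/NdkMediaFormat.h>

    AMediaFormat* format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, 1280);            // required
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, 720);            // required
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, 2000000);      // required for video encoders
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 1);    // required for video encoders
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, 30);
    // AMediaCodec_configure(codec, format, nullptr, nullptr, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);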
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 736aac5..d22153d 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -140,6 +140,7 @@
protected:
class Impl;
+ class ImplV2;
Impl *mImpl;
// TODO: we could make this encapsulate shared_ptr and copiable
@@ -147,7 +148,7 @@
};
class C2AllocationIon::Impl {
-private:
+protected:
/**
* Constructs an ion allocation.
*
@@ -191,11 +192,7 @@
* \return created ion allocation (implementation) which may be invalid if the
* import failed.
*/
- static Impl *Import(int ionFd, size_t capacity, int bufferFd, C2Allocator::id_t id) {
- ion_user_handle_t buffer = -1;
- int ret = ion_import(ionFd, bufferFd, &buffer);
- return new Impl(ionFd, capacity, bufferFd, buffer, id, ret);
- }
+ static Impl *Import(int ionFd, size_t capacity, int bufferFd, C2Allocator::id_t id);
/**
* Constructs an ion allocation by allocating an ion buffer.
@@ -209,24 +206,7 @@
* \return created ion allocation (implementation) which may be invalid if the
* allocation failed.
*/
- static Impl *Alloc(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
- int bufferFd = -1;
- ion_user_handle_t buffer = -1;
- size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
- int ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
- ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
- "returned (%d) ; buffer = %d",
- ionFd, alignedSize, align, heapMask, flags, ret, buffer);
- if (ret == 0) {
- // get buffer fd for native handle constructor
- ret = ion_share(ionFd, buffer, &bufferFd);
- if (ret != 0) {
- ion_free(ionFd, buffer);
- buffer = -1;
- }
- }
- return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
- }
+ static Impl *Alloc(int ionFd, size_t size, size_t align, unsigned heapMask, unsigned flags, C2Allocator::id_t id);
c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence *fence, void **addr) {
(void)fence; // TODO: wait for fence
@@ -256,32 +236,7 @@
size_t mapSize = size + alignmentBytes;
Mapping map = { nullptr, alignmentBytes, mapSize };
- c2_status_t err = C2_OK;
- if (mMapFd == -1) {
- int ret = ion_map(mIonFd, mBuffer, mapSize, prot,
- flags, mapOffset, (unsigned char**)&map.addr, &mMapFd);
- ALOGV("ion_map(ionFd = %d, handle = %d, size = %zu, prot = %d, flags = %d, "
- "offset = %zu) returned (%d)",
- mIonFd, mBuffer, mapSize, prot, flags, mapOffset, ret);
- if (ret) {
- mMapFd = -1;
- map.addr = *addr = nullptr;
- err = c2_map_errno<EINVAL>(-ret);
- } else {
- *addr = (uint8_t *)map.addr + alignmentBytes;
- }
- } else {
- map.addr = mmap(nullptr, mapSize, prot, flags, mMapFd, mapOffset);
- ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
- "returned (%d)",
- mapSize, prot, flags, mMapFd, mapOffset, errno);
- if (map.addr == MAP_FAILED) {
- map.addr = *addr = nullptr;
- err = c2_map_errno<EINVAL>(errno);
- } else {
- *addr = (uint8_t *)map.addr + alignmentBytes;
- }
- }
+ c2_status_t err = mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
if (map.addr) {
mMappings.push_back(map);
}
@@ -289,7 +244,7 @@
}
c2_status_t unmap(void *addr, size_t size, C2Fence *fence) {
- if (mMapFd < 0 || mMappings.empty()) {
+ if (mMappings.empty()) {
ALOGD("tried to unmap unmapped buffer");
return C2_NOT_FOUND;
}
@@ -307,14 +262,14 @@
*fence = C2Fence(); // not using fences
}
(void)mMappings.erase(it);
- ALOGV("successfully unmapped: %d", mBuffer);
+ ALOGV("successfully unmapped: %d", mHandle.bufferFd());
return C2_OK;
}
ALOGD("unmap failed to find specified map");
return C2_BAD_VALUE;
}
- ~Impl() {
+ virtual ~Impl() {
if (!mMappings.empty()) {
ALOGD("Dangling mappings!");
for (const Mapping &map : mMappings) {
@@ -326,7 +281,9 @@
mMapFd = -1;
}
if (mInit == C2_OK) {
- (void)ion_free(mIonFd, mBuffer);
+ if (mBuffer >= 0) {
+ (void)ion_free(mIonFd, mBuffer);
+ }
native_handle_close(&mHandle);
}
if (mIonFd >= 0) {
@@ -346,11 +303,42 @@
return mId;
}
- ion_user_handle_t ionHandle() const {
+ virtual ion_user_handle_t ionHandle() const {
return mBuffer;
}
-private:
+protected:
+ virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+ int prot, int flags, void** base, void** addr) {
+ c2_status_t err = C2_OK;
+ if (mMapFd == -1) {
+ int ret = ion_map(mIonFd, mBuffer, mapSize, prot,
+ flags, mapOffset, (unsigned char**)base, &mMapFd);
+ ALOGV("ion_map(ionFd = %d, handle = %d, size = %zu, prot = %d, flags = %d, "
+ "offset = %zu) returned (%d)",
+ mIonFd, mBuffer, mapSize, prot, flags, mapOffset, ret);
+ if (ret) {
+ mMapFd = -1;
+ *base = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(-ret);
+ } else {
+ *addr = (uint8_t *)*base + alignmentBytes;
+ }
+ } else {
+ *base = mmap(nullptr, mapSize, prot, flags, mMapFd, mapOffset);
+ ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+ "returned (%d)",
+ mapSize, prot, flags, mMapFd, mapOffset, errno);
+ if (*base == MAP_FAILED) {
+ *base = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(errno);
+ } else {
+ *addr = (uint8_t *)*base + alignmentBytes;
+ }
+ }
+ return err;
+ }
+
int mIonFd;
C2HandleIon mHandle;
ion_user_handle_t mBuffer;
@@ -365,6 +353,93 @@
std::list<Mapping> mMappings;
};
+class C2AllocationIon::ImplV2 : public C2AllocationIon::Impl {
+public:
+ /**
+ * Constructs an ion allocation for platforms with new (ion_4.12.h) api
+ *
+ * \note We always create an ion allocation, even if the allocation or import fails
+ * so that we can capture the error.
+ *
+ * \param ionFd ion client (ownership transferred to created object)
+ * \param capacity size of allocation
+ * \param bufferFd buffer handle (ownership transferred to created object). Must be
+ * invalid if err is not 0.
+ * \param err errno during buffer allocation or import
+ */
+ ImplV2(int ionFd, size_t capacity, int bufferFd, C2Allocator::id_t id, int err)
+ : Impl(ionFd, capacity, bufferFd, -1 /*buffer*/, id, err) {
+ }
+
+ virtual ~ImplV2() = default;
+
+ virtual ion_user_handle_t ionHandle() const {
+ return mHandle.bufferFd();
+ }
+
+protected:
+ virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+ int prot, int flags, void** base, void** addr) {
+ c2_status_t err = C2_OK;
+ *base = mmap(nullptr, mapSize, prot, flags, mHandle.bufferFd(), mapOffset);
+ ALOGV("mmapV2(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+ "returned (%d)",
+ mapSize, prot, flags, mHandle.bufferFd(), mapOffset, errno);
+ if (*base == MAP_FAILED) {
+ *base = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(errno);
+ } else {
+ *addr = (uint8_t *)*base + alignmentBytes;
+ }
+ return err;
+ }
+
+};
+
+C2AllocationIon::Impl *C2AllocationIon::Impl::Import(int ionFd, size_t capacity, int bufferFd,
+ C2Allocator::id_t id) {
+ int ret = 0;
+ if (ion_is_legacy(ionFd)) {
+ ion_user_handle_t buffer = -1;
+ ret = ion_import(ionFd, bufferFd, &buffer);
+ return new Impl(ionFd, capacity, bufferFd, buffer, id, ret);
+ } else {
+ return new ImplV2(ionFd, capacity, bufferFd, id, ret);
+ }
+}
+
+C2AllocationIon::Impl *C2AllocationIon::Impl::Alloc(int ionFd, size_t size, size_t align,
+ unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
+ int bufferFd = -1;
+ ion_user_handle_t buffer = -1;
+ size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+ int ret;
+
+ if (ion_is_legacy(ionFd)) {
+ ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+ ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
+ "returned (%d) ; buffer = %d",
+ ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+ if (ret == 0) {
+ // get buffer fd for native handle constructor
+ ret = ion_share(ionFd, buffer, &bufferFd);
+ if (ret != 0) {
+ ion_free(ionFd, buffer);
+ buffer = -1;
+ }
+ }
+ return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
+
+ } else {
+ ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+ ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
+ "returned (%d) ; bufferFd = %d",
+ ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+
+ return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+ }
+}
+
c2_status_t C2AllocationIon::map(
size_t offset, size_t size, C2MemoryUsage usage, C2Fence *fence, void **addr) {
return mImpl->map(offset, size, usage, fence, addr);
diff --git a/media/libmedia/NdkWrapper.cpp b/media/libmedia/NdkWrapper.cpp
index cbd64bb..ea0547c 100644
--- a/media/libmedia/NdkWrapper.cpp
+++ b/media/libmedia/NdkWrapper.cpp
@@ -96,6 +96,7 @@
static const char *AMediaFormatKeyGroupInt64[] = {
AMEDIAFORMAT_KEY_DURATION,
+ AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER,
AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER,
AMEDIAFORMAT_KEY_TIME_US,
};
@@ -127,6 +128,7 @@
static const char *AMediaFormatKeyGroupFloatInt32[] = {
AMEDIAFORMAT_KEY_FRAME_RATE,
AMEDIAFORMAT_KEY_I_FRAME_INTERVAL,
+ AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER,
AMEDIAFORMAT_KEY_OPERATING_RATE,
};
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index 8dac91a..0301b21 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -415,14 +415,6 @@
OutputDeviceConverter::fromString(literalDevice, device);
}
-bool deviceToString(audio_devices_t device, std::string& literalDevice) {
- if (device & AUDIO_DEVICE_BIT_IN) {
- return InputDeviceConverter::toString(device, literalDevice);
- } else {
- return OutputDeviceConverter::toString(device, literalDevice);
- }
-}
-
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del)
{
diff --git a/media/libmedia/include/media/TypeConverter.h b/media/libmedia/include/media/TypeConverter.h
index 418e09c..3acfe98 100644
--- a/media/libmedia/include/media/TypeConverter.h
+++ b/media/libmedia/include/media/TypeConverter.h
@@ -233,8 +233,6 @@
bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
-bool deviceToString(audio_devices_t device, std::string& literalDevice);
-
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
@@ -255,47 +253,53 @@
OutputChannelTraits::Collection outputChannelMasksFromString(
const std::string &outChannels, const char *del = AudioParameter::valueListSeparator);
-static inline std::string toString(audio_usage_t usage)
+// counting enumerations
+template <typename T, std::enable_if_t<std::is_same<T, audio_content_type_t>::value
+ || std::is_same<T, audio_mode_t>::value
+ || std::is_same<T, audio_source_t>::value
+ || std::is_same<T, audio_stream_type_t>::value
+ || std::is_same<T, audio_usage_t>::value
+ , int> = 0>
+static inline std::string toString(const T& value)
{
- std::string usageLiteral;
- if (!android::UsageTypeConverter::toString(usage, usageLiteral)) {
- ALOGV("failed to convert usage: %d", usage);
- return "AUDIO_USAGE_UNKNOWN";
- }
- return usageLiteral;
+ std::string result;
+ return TypeConverter<DefaultTraits<T>>::toString(value, result)
+ ? result : std::to_string(static_cast<int>(value));
+
}
-static inline std::string toString(audio_content_type_t content)
+// flag enumerations
+template <typename T, std::enable_if_t<std::is_same<T, audio_gain_mode_t>::value
+ || std::is_same<T, audio_input_flags_t>::value
+ || std::is_same<T, audio_output_flags_t>::value
+ , int> = 0>
+static inline std::string toString(const T& value)
{
- std::string contentLiteral;
- if (!android::AudioContentTypeConverter::toString(content, contentLiteral)) {
- ALOGV("failed to convert content type: %d", content);
- return "AUDIO_CONTENT_TYPE_UNKNOWN";
- }
- return contentLiteral;
+ std::string result;
+ TypeConverter<DefaultTraits<T>>::maskToString(value, result);
+ return result;
}
-static inline std::string toString(audio_stream_type_t stream)
+static inline std::string toString(const audio_devices_t& devices)
{
- std::string streamLiteral;
- if (!android::StreamTypeConverter::toString(stream, streamLiteral)) {
- ALOGV("failed to convert stream: %d", stream);
- return "AUDIO_STREAM_DEFAULT";
+ std::string result;
+ if ((devices & AUDIO_DEVICE_BIT_IN) != 0) {
+ InputDeviceConverter::maskToString(devices, result);
+ } else {
+ OutputDeviceConverter::maskToString(devices, result);
}
- return streamLiteral;
+ return result;
}
-static inline std::string toString(audio_source_t source)
+// TODO: Remove when FormatTraits uses DefaultTraits.
+static inline std::string toString(const audio_format_t& format)
{
- std::string sourceLiteral;
- if (!android::SourceTypeConverter::toString(source, sourceLiteral)) {
- ALOGV("failed to convert source: %d", source);
- return "AUDIO_SOURCE_DEFAULT";
- }
- return sourceLiteral;
+ std::string result;
+ return TypeConverter<VectorTraits<audio_format_t>>::toString(format, result)
+ ? result : std::to_string(static_cast<int>(format));
}
-static inline std::string toString(const audio_attributes_t &attributes)
+static inline std::string toString(const audio_attributes_t& attributes)
{
std::ostringstream result;
result << "{ Content type: " << toString(attributes.content_type)
@@ -308,16 +312,6 @@
return result.str();
}
-static inline std::string toString(audio_mode_t mode)
-{
- std::string modeLiteral;
- if (!android::AudioModeConverter::toString(mode, modeLiteral)) {
- ALOGV("failed to convert mode: %d", mode);
- return "AUDIO_MODE_INVALID";
- }
- return modeLiteral;
-}
-
}; // namespace android
#endif /*ANDROID_TYPE_CONVERTER_H_*/
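Illustrative only: with the templated overloads above, call sites use a single android::toString() instead of the per-type helpers removed by this change, e.g. (assuming media/TypeConverter.h is included):

    std::string stream  = android::toString(AUDIO_STREAM_MUSIC);        // counting enum
    std::string flags   = android::toString(AUDIO_OUTPUT_FLAG_FAST);    // flag mask
    std::string devices = android::toString(AUDIO_DEVICE_OUT_SPEAKER);  // device mask, input/output aware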
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 22fa495..46a1c24 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -64,8 +64,6 @@
name: "libmediaplayerservice",
- compile_multilib: "32",
-
sanitize: {
cfi: true,
},
diff --git a/media/libmediaplayerservice/tests/Android.bp b/media/libmediaplayerservice/tests/Android.bp
index 4749a8b..f8c89e5 100644
--- a/media/libmediaplayerservice/tests/Android.bp
+++ b/media/libmediaplayerservice/tests/Android.bp
@@ -14,8 +14,6 @@
"android.hardware.drm@1.2",
],
- compile_multilib: "32",
-
cflags: [
"-Werror",
"-Wall",
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9d3338b..1dee4f7 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -37,6 +37,7 @@
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
@@ -1817,20 +1818,19 @@
}
if (!msg->findInt64(
- "repeat-previous-frame-after",
- &mRepeatFrameDelayUs)) {
+ KEY_REPEAT_PREVIOUS_FRAME_AFTER, &mRepeatFrameDelayUs)) {
mRepeatFrameDelayUs = -1LL;
}
// only allow 32-bit value, since we pass it as U32 to OMX.
- if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
+ if (!msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &mMaxPtsGapUs)) {
mMaxPtsGapUs = 0LL;
} else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < INT32_MIN) {
ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs);
mMaxPtsGapUs = 0LL;
}
- if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
+ if (!msg->findFloat(KEY_MAX_FPS_TO_ENCODER, &mMaxFps)) {
mMaxFps = -1;
}
@@ -1844,8 +1844,8 @@
}
if (!msg->findInt32(
- "create-input-buffers-suspended",
- (int32_t*)&mCreateInputBuffersSuspended)) {
+ KEY_CREATE_INPUT_SURFACE_SUSPENDED,
+ (int32_t*)&mCreateInputBuffersSuspended)) {
mCreateInputBuffersSuspended = false;
}
}
@@ -7431,7 +7431,7 @@
}
int64_t timeOffsetUs;
- if (params->findInt64("time-offset-us", &timeOffsetUs)) {
+ if (params->findInt64(PARAMETER_KEY_OFFSET_TIME, &timeOffsetUs)) {
if (mGraphicBufferSource == NULL) {
ALOGE("[%s] Invalid to set input buffer time offset without surface",
mComponentName.c_str());
@@ -7467,7 +7467,7 @@
}
int32_t dropInputFrames;
- if (params->findInt32("drop-input-frames", &dropInputFrames)) {
+ if (params->findInt32(PARAMETER_KEY_SUSPEND, &dropInputFrames)) {
if (mGraphicBufferSource == NULL) {
ALOGE("[%s] Invalid to set suspend without surface",
mComponentName.c_str());
@@ -7475,7 +7475,7 @@
}
int64_t suspendStartTimeUs = -1;
- (void) params->findInt64("drop-start-time-us", &suspendStartTimeUs);
+ (void) params->findInt64(PARAMETER_KEY_SUSPEND_TIME, &suspendStartTimeUs);
status_t err = statusFromBinderStatus(
mGraphicBufferSource->setSuspend(dropInputFrames != 0, suspendStartTimeUs));
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 5d2291f..c3d85ee 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -32,6 +32,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/MediaErrors.h>
@@ -362,7 +363,7 @@
status_t MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
- msg->setInt64("time-offset-us", timeOffsetUs);
+ msg->setInt64(PARAMETER_KEY_OFFSET_TIME, timeOffsetUs);
return postSynchronouslyAndReturnError(msg);
}
@@ -490,7 +491,7 @@
mCodecLooper->start();
if (mFlags & FLAG_USE_SURFACE_INPUT) {
- mOutputFormat->setInt32("create-input-buffers-suspended", 1);
+ mOutputFormat->setInt32(KEY_CREATE_INPUT_SURFACE_SUSPENDED, 1);
}
AString outputMIME;
@@ -677,9 +678,9 @@
CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
if (mEncoder != NULL) {
sp<AMessage> params = new AMessage;
- params->setInt32("drop-input-frames", false);
+ params->setInt32(PARAMETER_KEY_SUSPEND, false);
if (resumeStartTimeUs > 0) {
- params->setInt64("drop-start-time-us", resumeStartTimeUs);
+ params->setInt64(PARAMETER_KEY_SUSPEND_TIME, resumeStartTimeUs);
}
mEncoder->setParameters(params);
}
@@ -799,7 +800,7 @@
if (mFlags & FLAG_USE_SURFACE_INPUT) {
if (mEncoder != NULL) {
sp<AMessage> params = new AMessage;
- params->setInt32("drop-input-frames", false);
+ params->setInt32(PARAMETER_KEY_SUSPEND, false);
if (startTimeUs >= 0) {
params->setInt64("skip-frames-before", startTimeUs);
}
@@ -832,8 +833,8 @@
void MediaCodecSource::onPause(int64_t pauseStartTimeUs) {
if ((mFlags & FLAG_USE_SURFACE_INPUT) && (mEncoder != NULL)) {
sp<AMessage> params = new AMessage;
- params->setInt32("drop-input-frames", true);
- params->setInt64("drop-start-time-us", pauseStartTimeUs);
+ params->setInt32(PARAMETER_KEY_SUSPEND, true);
+ params->setInt64(PARAMETER_KEY_SUSPEND_TIME, pauseStartTimeUs);
mEncoder->setParameters(params);
} else {
CHECK(mPuller != NULL);
@@ -1096,12 +1097,12 @@
sp<AReplyToken> replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
status_t err = OK;
- CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));
+ CHECK(msg->findInt64(PARAMETER_KEY_OFFSET_TIME, &mInputBufferTimeOffsetUs));
// Propagate the timestamp offset to GraphicBufferSource.
if (mFlags & FLAG_USE_SURFACE_INPUT) {
sp<AMessage> params = new AMessage;
- params->setInt64("time-offset-us", mInputBufferTimeOffsetUs);
+ params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
err = mEncoder->setParameters(params);
}
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 16c7be9..f01947a 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -21,7 +21,6 @@
"libutils",
"libbinder",
"libandroidicu",
- "android.hardware.media.omx@1.0",
],
static_libs: [
@@ -34,8 +33,6 @@
"frameworks/av/services/mediaresourcemanager",
],
- compile_multilib: "32",
-
init_rc: ["mediaserver.rc"],
cflags: [
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index b929f7f..b010aa9 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -70,6 +70,7 @@
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
+ case AIMAGE_FORMAT_DEPTH_JPEG:
return true;
case AIMAGE_FORMAT_PRIVATE:
// For private format, cpu usage is prohibited.
@@ -98,6 +99,7 @@
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
+ case AIMAGE_FORMAT_DEPTH_JPEG:
return 1;
case AIMAGE_FORMAT_PRIVATE:
return 0;
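Illustrative sketch only, not part of this change: with AIMAGE_FORMAT_DEPTH_JPEG whitelisted above, an AImageReader can be created for dynamic depth output (dimensions and maxImages are placeholders):

    #include <media/NdkImageReader.h>

    AImageReader* reader = nullptr;
    media_status_t status = AImageReader_new(
            640, 480, AIMAGE_FORMAT_DEPTH_JPEG, /*maxImages*/ 2, &reader);
    if (status == AMEDIA_OK) {
        // each acquired AImage has a single plane holding the compressed JPEG plus XMP depth metadata
    }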
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index 26a6238..7cc7f16 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -344,8 +344,10 @@
EXPORT const char* AMEDIAFORMAT_KEY_LYRICIST = "lyricist";
EXPORT const char* AMEDIAFORMAT_KEY_MANUFACTURER = "manufacturer";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE = "max-bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER = "max-fps-to-encoder";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER = "max-pts-gap-to-encoder";
EXPORT const char* AMEDIAFORMAT_KEY_MAX_WIDTH = "max-width";
EXPORT const char* AMEDIAFORMAT_KEY_MIME = "mime";
EXPORT const char* AMEDIAFORMAT_KEY_MPEG_USER_DATA = "mpeg-user-data";
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 14d88cb..3e60de0 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -535,6 +535,15 @@
* Format as described in ISO/IEC 23008-12.</p>
*/
AIMAGE_FORMAT_HEIC = 0x48454946,
+
+ /**
+ * Depth augmented compressed JPEG format.
+ *
+ * <p>JPEG compressed main image along with XMP embedded depth metadata
+ * following ISO 16684-1:2011(E).</p>
+ */
+ AIMAGE_FORMAT_DEPTH_JPEG = 0x69656963,
+
};
/**
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index ddf5291..56bcaab 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -216,6 +216,8 @@
extern const char* AMEDIAFORMAT_KEY_LYRICIST __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MANUFACTURER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MAX_BIT_RATE __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_MPEG2_STREAM_HEADER __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_PCM_BIG_ENDIAN __INTRODUCED_IN(29);
extern const char* AMEDIAFORMAT_KEY_PSSH __INTRODUCED_IN(29);
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 7bdd3ad..9756926 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -107,8 +107,10 @@
AMEDIAFORMAT_KEY_LYRICIST; # var introduced=29
AMEDIAFORMAT_KEY_MANUFACTURER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_BIT_RATE; # var introduced=29
+ AMEDIAFORMAT_KEY_MAX_FPS_TO_ENCODER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var introduced=21
+ AMEDIAFORMAT_KEY_MAX_PTS_GAP_TO_ENCODER; # var introduced=29
AMEDIAFORMAT_KEY_MAX_WIDTH; # var introduced=21
AMEDIAFORMAT_KEY_MIME; # var introduced=21
AMEDIAFORMAT_KEY_MPEG_USER_DATA; # var introduced=28
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index ff33957..8a45fc2 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -457,52 +457,6 @@
}
}
-std::string devicesToString(audio_devices_t devices)
-{
- std::string result;
- if (devices & AUDIO_DEVICE_BIT_IN) {
- InputDeviceConverter::maskToString(devices, result);
- } else {
- OutputDeviceConverter::maskToString(devices, result);
- }
- return result;
-}
-
-std::string inputFlagsToString(audio_input_flags_t flags)
-{
- std::string result;
- InputFlagConverter::maskToString(flags, result);
- return result;
-}
-
-std::string outputFlagsToString(audio_output_flags_t flags)
-{
- std::string result;
- OutputFlagConverter::maskToString(flags, result);
- return result;
-}
-
-const char *sourceToString(audio_source_t source)
-{
- switch (source) {
- case AUDIO_SOURCE_DEFAULT: return "default";
- case AUDIO_SOURCE_MIC: return "mic";
- case AUDIO_SOURCE_VOICE_UPLINK: return "voice uplink";
- case AUDIO_SOURCE_VOICE_DOWNLINK: return "voice downlink";
- case AUDIO_SOURCE_VOICE_CALL: return "voice call";
- case AUDIO_SOURCE_CAMCORDER: return "camcorder";
- case AUDIO_SOURCE_VOICE_RECOGNITION: return "voice recognition";
- case AUDIO_SOURCE_VOICE_COMMUNICATION: return "voice communication";
- case AUDIO_SOURCE_REMOTE_SUBMIX: return "remote submix";
- case AUDIO_SOURCE_UNPROCESSED: return "unprocessed";
- case AUDIO_SOURCE_VOICE_PERFORMANCE: return "voice performance";
- case AUDIO_SOURCE_ECHO_REFERENCE: return "echo reference";
- case AUDIO_SOURCE_FM_TUNER: return "FM tuner";
- case AUDIO_SOURCE_HOTWORD: return "hotword";
- default: return "unknown";
- }
-}
-
AudioFlinger::ThreadBase::ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
audio_devices_t outDevice, audio_devices_t inDevice, type_t type, bool systemReady)
: Thread(false /*canCallJava*/),
@@ -717,8 +671,8 @@
event->mStatus = createAudioPatch_l(&data->mPatch, &data->mHandle);
const audio_devices_t newDevice = getDevice();
mLocalLog.log("CFG_EVENT_CREATE_AUDIO_PATCH: old device %#x (%s) new device %#x (%s)",
- (unsigned)oldDevice, devicesToString(oldDevice).c_str(),
- (unsigned)newDevice, devicesToString(newDevice).c_str());
+ (unsigned)oldDevice, toString(oldDevice).c_str(),
+ (unsigned)newDevice, toString(newDevice).c_str());
} break;
case CFG_EVENT_RELEASE_AUDIO_PATCH: {
const audio_devices_t oldDevice = getDevice();
@@ -727,8 +681,8 @@
event->mStatus = releaseAudioPatch_l(data->mHandle);
const audio_devices_t newDevice = getDevice();
mLocalLog.log("CFG_EVENT_RELEASE_AUDIO_PATCH: old device %#x (%s) new device %#x (%s)",
- (unsigned)oldDevice, devicesToString(oldDevice).c_str(),
- (unsigned)newDevice, devicesToString(newDevice).c_str());
+ (unsigned)oldDevice, toString(oldDevice).c_str(),
+ (unsigned)newDevice, toString(newDevice).c_str());
} break;
default:
ALOG_ASSERT(false, "processConfigEvents_l() unknown event type %d", event->mType);
@@ -858,9 +812,9 @@
dprintf(fd, " none\n");
}
// Note: output device may be used by capture threads for effects such as AEC.
- dprintf(fd, " Output device: %#x (%s)\n", mOutDevice, devicesToString(mOutDevice).c_str());
- dprintf(fd, " Input device: %#x (%s)\n", mInDevice, devicesToString(mInDevice).c_str());
- dprintf(fd, " Audio source: %d (%s)\n", mAudioSource, sourceToString(mAudioSource));
+ dprintf(fd, " Output device: %#x (%s)\n", mOutDevice, toString(mOutDevice).c_str());
+ dprintf(fd, " Input device: %#x (%s)\n", mInDevice, toString(mInDevice).c_str());
+ dprintf(fd, " Audio source: %d (%s)\n", mAudioSource, toString(mAudioSource).c_str());
// Dump timestamp statistics for the Thread types that support it.
if (mType == RECORD
@@ -1885,7 +1839,7 @@
AudioStreamOut *output = mOutput;
audio_output_flags_t flags = output != NULL ? output->flags : AUDIO_OUTPUT_FLAG_NONE;
dprintf(fd, " AudioStreamOut: %p flags %#x (%s)\n",
- output, flags, outputFlagsToString(flags).c_str());
+ output, flags, toString(flags).c_str());
dprintf(fd, " Frames written: %lld\n", (long long)mFramesWritten);
dprintf(fd, " Suspended frames: %lld\n", (long long)mSuspendedFrames);
if (mPipeSink.get() != nullptr) {
@@ -7769,7 +7723,7 @@
AudioStreamIn *input = mInput;
audio_input_flags_t flags = input != NULL ? input->flags : AUDIO_INPUT_FLAG_NONE;
dprintf(fd, " AudioStreamIn: %p flags %#x (%s)\n",
- input, flags, inputFlagsToString(flags).c_str());
+ input, flags, toString(flags).c_str());
dprintf(fd, " Frames read: %lld\n", (long long)mFramesRead);
if (mActiveTracks.isEmpty()) {
dprintf(fd, " No active record clients\n");
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index cd1c2f2..3a4db90 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -41,9 +41,7 @@
const audio_port_config &cfg = cfgs[i];
dst->appendFormat("%*s [%s %d] ", spaces, "", prefix, i + 1);
if (cfg.type == AUDIO_PORT_TYPE_DEVICE) {
- std::string device;
- deviceToString(cfg.ext.device.type, device);
- dst->appendFormat("Device ID %d %s", cfg.id, device.c_str());
+ dst->appendFormat("Device ID %d %s", cfg.id, toString(cfg.ext.device.type).c_str());
} else {
dst->appendFormat("Mix ID %d I/O handle %d", cfg.id, cfg.ext.mix.handle);
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 2c4695d..23d764e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -66,9 +66,7 @@
RouteFlagTypeConverter::maskToString(mMix.mRouteFlags, routeFlagLiteral);
dst->appendFormat("%*s- Route Flags: %s\n", spaces, "", routeFlagLiteral.c_str());
- std::string deviceLiteral;
- deviceToString(mMix.mDeviceType, deviceLiteral);
- dst->appendFormat("%*s- device type: %s\n", spaces, "", deviceLiteral.c_str());
+ dst->appendFormat("%*s- device type: %s\n", spaces, "", toString(mMix.mDeviceType).c_str());
dst->appendFormat("%*s- device address: %s\n", spaces, "", mMix.mDeviceAddress.string());
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index a3121d1..91961d0 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include <audio_utils/string.h>
+#include <media/TypeConverter.h>
#include <set>
#include "DeviceDescriptor.h"
#include "TypeConverter.h"
@@ -346,10 +347,9 @@
if (!mTagName.isEmpty()) {
dst->appendFormat("%*s- tag name: %s\n", spaces, "", mTagName.string());
}
- std::string deviceLiteral;
- if (deviceToString(mDeviceType, deviceLiteral)) {
- dst->appendFormat("%*s- type: %-48s\n", spaces, "", deviceLiteral.c_str());
- }
+
+ dst->appendFormat("%*s- type: %-48s\n", spaces, "", ::android::toString(mDeviceType).c_str());
+
if (mAddress.size() != 0) {
dst->appendFormat("%*s- address: %-32s\n", spaces, "", mAddress.string());
}
@@ -401,9 +401,8 @@
void DeviceDescriptor::log() const
{
- std::string device;
- deviceToString(mDeviceType, device);
- ALOGI("Device id:%d type:0x%08X:%s, addr:%s", mId, mDeviceType, device.c_str(),
+ ALOGI("Device id:%d type:0x%08X:%s, addr:%s", mId, mDeviceType,
+ ::android::toString(mDeviceType).c_str(),
mAddress.string());
AudioPort::log(" ");
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 5cdc0e7..c19016f 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -450,28 +450,14 @@
return NO_ERROR;
}
-// this is replicated from frameworks/av/media/libaudioclient/AudioRecord.cpp
-// XXX -- figure out how to put it into a common, shared location
-
-static std::string audioSourceString(audio_source_t value) {
- std::string source;
- if (SourceTypeConverter::toString(value, source)) {
- return source;
- }
- char rawbuffer[16]; // room for "%d"
- snprintf(rawbuffer, sizeof(rawbuffer), "%d", value);
- return rawbuffer;
-}
-
std::string AudioPolicyService::getDeviceTypeStrForPortId(audio_port_handle_t portId) {
- std::string typeStr;
struct audio_port port = {};
port.id = portId;
status_t status = mAudioPolicyManager->getAudioPort(&port);
if (status == NO_ERROR && port.type == AUDIO_PORT_TYPE_DEVICE) {
- deviceToString(port.ext.device.type, typeStr);
+ return toString(port.ext.device.type);
}
- return typeStr;
+ return {};
}
status_t AudioPolicyService::startInput(audio_port_handle_t portId)
@@ -534,7 +520,7 @@
item->setInt32(kAudioPolicyStatus, status);
item->setCString(kAudioPolicyRqstSrc,
- audioSourceString(client->attributes.source).c_str());
+ toString(client->attributes.source).c_str());
item->setInt32(kAudioPolicyRqstSession, client->session);
if (client->opPackageName.size() != 0) {
item->setCString(kAudioPolicyRqstPkg,
@@ -554,7 +540,7 @@
if (other->active) {
// keeps the last of the clients marked active
item->setCString(kAudioPolicyActiveSrc,
- audioSourceString(other->attributes.source).c_str());
+ toString(other->attributes.source).c_str());
item->setInt32(kAudioPolicyActiveSession, other->session);
if (other->opPackageName.size() != 0) {
item->setCString(kAudioPolicyActivePkg,
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index 1c63f64..f3339a0 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -16,8 +16,6 @@
"liblog",
],
- compile_multilib: "32",
-
include_dirs: ["frameworks/av/include"],
cflags: [