Merge "mp3dec: Fix out of bound read error"
diff --git a/apex/Android.bp b/apex/Android.bp
index 80e751c..fac3831 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -31,6 +31,8 @@
"libmpeg2extractor",
"liboggextractor",
"libwavextractor",
+ // JNI
+ "libmediaparser-jni"
],
},
},
diff --git a/apex/manifest.json b/apex/manifest.json
index ddd642e..f1f69f4 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,4 +1,4 @@
{
"name": "com.android.media",
- "version": 300000000
+ "version": 309999900
}
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index 1f05d2e..e20d867 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media.swcodec",
- "version": 300000000,
+ "version": 309999900,
"requireNativeLibs": [
":sphal"
]
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 631f6cd..895514e 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -426,6 +426,7 @@
camera_metadata_ro_entry_t entry;
int ret = get_camera_metadata_ro_entry(rawMetadata, i, &entry);
if (ret != 0) {
+ mData->unlock(rawMetadata);
ALOGE("%s: error reading metadata index %zu", __FUNCTION__, i);
return ACAMERA_ERROR_UNKNOWN;
}
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index ebe7b40..38f7389 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -254,6 +254,8 @@
kParamIndexTunneledMode, // struct
kParamIndexTunnelHandle, // int32[]
kParamIndexTunnelSystemTime, // int64
+
+ kParamIndexStoreDmaBufUsage, // store, struct
};
}
@@ -2041,6 +2043,33 @@
C2StoreIonUsageInfo;
/**
+ * This structure describes the preferred DMA-Buf allocation parameters for a given memory usage.
+ */
+struct C2StoreDmaBufUsageStruct {
+ inline C2StoreDmaBufUsageStruct() { memset(this, 0, sizeof(*this)); }
+
+ inline C2StoreDmaBufUsageStruct(size_t flexCount, uint64_t usage_, uint32_t capacity_)
+ : usage(usage_), capacity(capacity_), allocFlags(0) {
+ memset(heapName, 0, flexCount);
+ }
+
+ uint64_t usage; ///< C2MemoryUsage
+ uint32_t capacity; ///< capacity
+ int32_t allocFlags; ///< dmabuf allocation flags
+ char heapName[]; ///< dmabuf heap name
+
+ DEFINE_AND_DESCRIBE_FLEX_C2STRUCT(StoreDmaBufUsage, heapName)
+ C2FIELD(usage, "usage")
+ C2FIELD(capacity, "capacity")
+ C2FIELD(allocFlags, "alloc-flags")
+ C2FIELD(heapName, "heap-name")
+};
+
+// store, private
+typedef C2GlobalParam<C2Info, C2StoreDmaBufUsageStruct, kParamIndexStoreDmaBufUsage>
+ C2StoreDmaBufUsageInfo;
+
+/**
* Flexible pixel format descriptors
*/
struct C2FlexiblePixelFormatDescriptorStruct {
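For reference, a minimal sketch of how a client could build and read the new flexible parameter (illustrative only; the 128-byte heap-name length and the usage/capacity values are assumptions, and the actual configuration flow lives in C2Store.cpp further down in this change):

    // #include <C2Config.h>
    // Reserve room for a 128-byte heap name (assumed), asking for CPU read/write usage
    // on a 1 MiB buffer.
    std::unique_ptr<C2StoreDmaBufUsageInfo> info = C2StoreDmaBufUsageInfo::AllocUnique(
            128, C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE, 1u << 20);
    // After the store configures the parameter, m.heapName and m.allocFlags carry the
    // preferred DMA-BUF heap and allocation flags for that usage.
    ALOGV("heap=%s flags=%d", info->m.heapName, info->m.allocFlags);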
diff --git a/media/codec2/hidl/services/vendor.cpp b/media/codec2/hidl/services/vendor.cpp
index 81bffeb..3ddb039 100644
--- a/media/codec2/hidl/services/vendor.cpp
+++ b/media/codec2/hidl/services/vendor.cpp
@@ -122,6 +122,18 @@
})
.withSetter(SetIonUsage)
.build());
+
+ addParameter(
+ DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+ .withDefault(C2StoreDmaBufUsageInfo::AllocShared(0))
+ .withFields({
+ C2F(mDmaBufUsageInfo, m.usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+ C2F(mDmaBufUsageInfo, m.capacity).inRange(0, UINT32_MAX, 1024),
+ C2F(mDmaBufUsageInfo, m.heapName).any(),
+ C2F(mDmaBufUsageInfo, m.allocFlags).flags({}),
+ })
+ .withSetter(SetDmaBufUsage)
+ .build());
}
virtual ~Interface() = default;
@@ -135,7 +147,16 @@
return C2R::Ok();
}
+ static C2R SetDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+ // TODO (vendor): add the appropriate usage-to-heap-name mapping logic here.
+ strncpy(me.set().m.heapName, "system", me.v.flexCount());
+ me.set().m.allocFlags = 0;
+ return C2R::Ok();
+ }
+
std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+ std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
};
std::shared_ptr<C2ReflectorHelper> mReflectorHelper;
Interface mInterface;
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 1405b97..f816778 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2168,15 +2168,17 @@
return OK;
}
}
- uint64_t minUsage = usage.expected;
- uint64_t maxUsage = ~0ull;
std::set<C2Allocator::id_t> allocators;
GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
if (allocators.empty()) {
*isCompatible = false;
return OK;
}
+
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ minUsage |= usage.expected;
*isCompatible = ((maxUsage & minUsage) == minUsage);
return OK;
}
@@ -2203,14 +2205,16 @@
// static
std::shared_ptr<C2LinearBlock> CCodec::FetchLinearBlock(
size_t capacity, const C2MemoryUsage &usage, const std::vector<std::string> &names) {
- uint64_t minUsage = usage.expected;
- uint64_t maxUsage = ~0ull;
std::set<C2Allocator::id_t> allocators;
GetCommonAllocatorIds(names, C2Allocator::LINEAR, &allocators);
if (allocators.empty()) {
allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
}
+
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = ~0ull;
CalculateMinMaxUsage(names, &minUsage, &maxUsage);
+ minUsage |= usage.expected;
if ((maxUsage & minUsage) != minUsage) {
allocators.clear();
allocators.insert(C2PlatformAllocatorStore::DEFAULT_LINEAR);
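For intuition, a short sketch of how the reordered usage computation behaves; the bit values are illustrative assumptions, and the apparent rationale is that CalculateMinMaxUsage overwrites a pre-seeded minUsage, so the caller's expected bits are now OR-ed in afterwards:

    // Suppose the common allocators report: minUsage = 0, maxUsage = CPU_READ | CPU_WRITE.
    uint64_t minUsage = 0;                                   // no longer pre-seeded with usage.expected
    uint64_t maxUsage = ~0ull;
    CalculateMinMaxUsage(names, &minUsage, &maxUsage);       // fills in the allocator-reported bounds
    minUsage |= usage.expected;                              // e.g. adds CPU_READ after the query
    bool compatible = ((maxUsage & minUsage) == minUsage);   // true here; false if the caller expects
                                                             // a usage bit the allocators cannot provide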
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 6f7acce..60f4736 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -26,6 +26,7 @@
"C2AllocatorGralloc.cpp",
"C2Buffer.cpp",
"C2Config.cpp",
+ "C2DmaBufAllocator.cpp",
"C2PlatformStorePluginLoader.cpp",
"C2Store.cpp",
"platform/C2BqBuffer.cpp",
@@ -64,6 +65,7 @@
"libhardware",
"libhidlbase",
"libion",
+ "libdmabufheap",
"libfmq",
"liblog",
"libnativewindow",
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
new file mode 100644
index 0000000..59e82e2
--- /dev/null
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -0,0 +1,401 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2DmaBufAllocator"
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <C2Debug.h>
+#include <C2DmaBufAllocator.h>
+#include <C2ErrnoUtils.h>
+#include <linux/ion.h>
+#include <sys/mman.h>
+#include <unistd.h> // getpagesize, size_t, close, dup
+#include <utils/Log.h>
+
+#include <list>
+
+#ifdef __ANDROID_APEX__
+#include <android-base/properties.h>
+#endif
+
+namespace android {
+
+namespace {
+constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+}
+
+/* =========================== BUFFER HANDLE =========================== */
+/**
+ * Buffer handle
+ *
+ * Stores dmabuf fd & metadata
+ *
+ * This handle does not capture mapped fds, as updating them would require a
+ * global mutex.
+ */
+
+struct C2HandleBuf : public C2Handle {
+ C2HandleBuf(int bufferFd, size_t size)
+ : C2Handle(cHeader),
+ mFds{bufferFd},
+ mInts{int(size & 0xFFFFFFFF), int((uint64_t(size) >> 32) & 0xFFFFFFFF), kMagic} {}
+
+ static bool IsValid(const C2Handle* const o);
+
+ int bufferFd() const { return mFds.mBuffer; }
+ size_t size() const {
+ return size_t(unsigned(mInts.mSizeLo)) | size_t(uint64_t(unsigned(mInts.mSizeHi)) << 32);
+ }
+
+ protected:
+ struct {
+ int mBuffer; // dmabuf fd
+ } mFds;
+ struct {
+ int mSizeLo; // low 32-bits of size
+ int mSizeHi; // high 32-bits of size
+ int mMagic;
+ } mInts;
+
+ private:
+ typedef C2HandleBuf _type;
+ enum {
+ kMagic = '\xc2io\x00',
+ numFds = sizeof(mFds) / sizeof(int),
+ numInts = sizeof(mInts) / sizeof(int),
+ version = sizeof(C2Handle)
+ };
+ // constexpr static C2Handle cHeader = { version, numFds, numInts, {} };
+ const static C2Handle cHeader;
+};
+
+const C2Handle C2HandleBuf::cHeader = {
+ C2HandleBuf::version, C2HandleBuf::numFds, C2HandleBuf::numInts, {}};
+
+// static
+bool C2HandleBuf::IsValid(const C2Handle* const o) {
+ if (!o || memcmp(o, &cHeader, sizeof(cHeader))) {
+ return false;
+ }
+ const C2HandleBuf* other = static_cast<const C2HandleBuf*>(o);
+ return other->mInts.mMagic == kMagic;
+}
+
+/* =========================== DMABUF ALLOCATION =========================== */
+class C2DmaBufAllocation : public C2LinearAllocation {
+ public:
+ /* Interface methods */
+ virtual c2_status_t map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+ void** addr /* nonnull */) override;
+ virtual c2_status_t unmap(void* addr, size_t size, C2Fence* fenceFd) override;
+ virtual ~C2DmaBufAllocation() override;
+ virtual const C2Handle* handle() const override;
+ virtual id_t getAllocatorId() const override;
+ virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
+
+ // internal methods
+ C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name, unsigned flags,
+ C2Allocator::id_t id);
+ C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
+
+ c2_status_t status() const;
+
+ protected:
+ virtual c2_status_t mapInternal(size_t mapSize, size_t mapOffset, size_t alignmentBytes,
+ int prot, int flags, void** base, void** addr) {
+ c2_status_t err = C2_OK;
+ *base = mmap(nullptr, mapSize, prot, flags, mHandle.bufferFd(), mapOffset);
+ ALOGV("mmap(size = %zu, prot = %d, flags = %d, mapFd = %d, offset = %zu) "
+ "returned (%d)",
+ mapSize, prot, flags, mHandle.bufferFd(), mapOffset, errno);
+ if (*base == MAP_FAILED) {
+ *base = *addr = nullptr;
+ err = c2_map_errno<EINVAL>(errno);
+ } else {
+ *addr = (uint8_t*)*base + alignmentBytes;
+ }
+ return err;
+ }
+
+ C2Allocator::id_t mId;
+ C2HandleBuf mHandle;
+ c2_status_t mInit;
+ struct Mapping {
+ void* addr;
+ size_t alignmentBytes;
+ size_t size;
+ };
+ std::list<Mapping> mMappings;
+
+ // TODO: we could make this encapsulate shared_ptr and copiable
+ C2_DO_NOT_COPY(C2DmaBufAllocation);
+};
+
+c2_status_t C2DmaBufAllocation::map(size_t offset, size_t size, C2MemoryUsage usage, C2Fence* fence,
+ void** addr) {
+ (void)fence; // TODO: wait for fence
+ *addr = nullptr;
+ if (!mMappings.empty()) {
+ ALOGV("multiple map");
+ // TODO: technically we should return DUPLICATE here, but our block views
+ // don't actually unmap, so we end up remapping the buffer multiple times.
+ //
+ // return C2_DUPLICATE;
+ }
+ if (size == 0) {
+ return C2_BAD_VALUE;
+ }
+
+ int prot = PROT_NONE;
+ int flags = MAP_SHARED;
+ if (usage.expected & C2MemoryUsage::CPU_READ) {
+ prot |= PROT_READ;
+ }
+ if (usage.expected & C2MemoryUsage::CPU_WRITE) {
+ prot |= PROT_WRITE;
+ }
+
+ size_t alignmentBytes = offset % PAGE_SIZE;
+ size_t mapOffset = offset - alignmentBytes;
+ size_t mapSize = size + alignmentBytes;
+ Mapping map = {nullptr, alignmentBytes, mapSize};
+
+ c2_status_t err =
+ mapInternal(mapSize, mapOffset, alignmentBytes, prot, flags, &(map.addr), addr);
+ if (map.addr) {
+ mMappings.push_back(map);
+ }
+ return err;
+}
+
+c2_status_t C2DmaBufAllocation::unmap(void* addr, size_t size, C2Fence* fence) {
+ if (mMappings.empty()) {
+ ALOGD("tried to unmap unmapped buffer");
+ return C2_NOT_FOUND;
+ }
+ for (auto it = mMappings.begin(); it != mMappings.end(); ++it) {
+ if (addr != (uint8_t*)it->addr + it->alignmentBytes ||
+ size + it->alignmentBytes != it->size) {
+ continue;
+ }
+ int err = munmap(it->addr, it->size);
+ if (err != 0) {
+ ALOGD("munmap failed");
+ return c2_map_errno<EINVAL>(errno);
+ }
+ if (fence) {
+ *fence = C2Fence(); // not using fences
+ }
+ (void)mMappings.erase(it);
+ ALOGV("successfully unmapped: %d", mHandle.bufferFd());
+ return C2_OK;
+ }
+ ALOGD("unmap failed to find specified map");
+ return C2_BAD_VALUE;
+}
+
+c2_status_t C2DmaBufAllocation::status() const {
+ return mInit;
+}
+
+C2Allocator::id_t C2DmaBufAllocation::getAllocatorId() const {
+ return mId;
+}
+
+bool C2DmaBufAllocation::equals(const std::shared_ptr<C2LinearAllocation>& other) const {
+ if (!other || other->getAllocatorId() != getAllocatorId()) {
+ return false;
+ }
+ // get user handle to compare objects
+ std::shared_ptr<C2DmaBufAllocation> otherAsBuf =
+ std::static_pointer_cast<C2DmaBufAllocation>(other);
+ return mHandle.bufferFd() == otherAsBuf->mHandle.bufferFd();
+}
+
+const C2Handle* C2DmaBufAllocation::handle() const {
+ return &mHandle;
+}
+
+C2DmaBufAllocation::~C2DmaBufAllocation() {
+ if (!mMappings.empty()) {
+ ALOGD("Dangling mappings!");
+ for (const Mapping& map : mMappings) {
+ int err = munmap(map.addr, map.size);
+ if (err) ALOGD("munmap failed");
+ }
+ }
+ if (mInit == C2_OK) {
+ native_handle_close(&mHandle);
+ }
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name,
+ unsigned flags, C2Allocator::id_t id)
+ : C2LinearAllocation(size), mHandle(-1, 0) {
+ int bufferFd = -1;
+ int ret = 0;
+
+ bufferFd = alloc.Alloc(heap_name, size, flags);
+ if (bufferFd < 0) ret = bufferFd;
+
+ mHandle = C2HandleBuf(bufferFd, size);
+ mId = id;
+ mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
+}
+
+C2DmaBufAllocation::C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id)
+ : C2LinearAllocation(size), mHandle(-1, 0) {
+ mHandle = C2HandleBuf(shareFd, size);
+ mId = id;
+ mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(0));
+}
+
+/* =========================== DMABUF ALLOCATOR =========================== */
+C2DmaBufAllocator::C2DmaBufAllocator(id_t id) : mInit(C2_OK) {
+ C2MemoryUsage minUsage = {0, 0};
+ C2MemoryUsage maxUsage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
+ Traits traits = {"android.allocator.dmabuf", id, LINEAR, minUsage, maxUsage};
+ mTraits = std::make_shared<Traits>(traits);
+}
+
+C2Allocator::id_t C2DmaBufAllocator::getId() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits->id;
+}
+
+C2String C2DmaBufAllocator::getName() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits->name;
+}
+
+std::shared_ptr<const C2Allocator::Traits> C2DmaBufAllocator::getTraits() const {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ return mTraits;
+}
+
+void C2DmaBufAllocator::setUsageMapper(const UsageMapperFn& mapper, uint64_t minUsage,
+ uint64_t maxUsage, uint64_t blockSize) {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ mUsageMapperCache.clear();
+ mUsageMapperLru.clear();
+ mUsageMapper = mapper;
+ Traits traits = {mTraits->name, mTraits->id, LINEAR, C2MemoryUsage(minUsage),
+ C2MemoryUsage(maxUsage)};
+ mTraits = std::make_shared<Traits>(traits);
+ mBlockSize = blockSize;
+}
+
+std::size_t C2DmaBufAllocator::MapperKeyHash::operator()(const MapperKey& k) const {
+ return std::hash<uint64_t>{}(k.first) ^ std::hash<size_t>{}(k.second);
+}
+
+c2_status_t C2DmaBufAllocator::mapUsage(C2MemoryUsage usage, size_t capacity, C2String* heap_name,
+ unsigned* flags) {
+ std::lock_guard<std::mutex> lock(mUsageMapperLock);
+ c2_status_t res = C2_OK;
+ // align capacity
+ capacity = (capacity + mBlockSize - 1) & ~(mBlockSize - 1);
+ MapperKey key = std::make_pair(usage.expected, capacity);
+ auto entry = mUsageMapperCache.find(key);
+ if (entry == mUsageMapperCache.end()) {
+ if (mUsageMapper) {
+ res = mUsageMapper(usage, capacity, heap_name, flags);
+ } else {
+ // The system-uncached heap is not available yet, so this path is disabled for now.
+ if (0 && !(usage.expected & (C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE)))
+ *heap_name = "system-uncached";
+ else
+ *heap_name = "system";
+ *flags = 0;
+ res = C2_NO_INIT;
+ }
+ // add usage to cache
+ MapperValue value = std::make_tuple(*heap_name, *flags, res);
+ mUsageMapperLru.emplace_front(key, value);
+ mUsageMapperCache.emplace(std::make_pair(key, mUsageMapperLru.begin()));
+ if (mUsageMapperCache.size() > USAGE_LRU_CACHE_SIZE) {
+ // remove the least-recently-used entry; entries are kept MRU-first, so the LRU is at the back
+ MapperKey lruKey = mUsageMapperLru.back().first;
+ mUsageMapperCache.erase(lruKey);
+ mUsageMapperLru.pop_back();
+ }
+ } else {
+ // move entry to MRU
+ mUsageMapperLru.splice(mUsageMapperLru.begin(), mUsageMapperLru, entry->second);
+ const MapperValue& value = entry->second->second;
+ std::tie(*heap_name, *flags, res) = value;
+ }
+ return res;
+}
+
+c2_status_t C2DmaBufAllocator::newLinearAllocation(
+ uint32_t capacity, C2MemoryUsage usage, std::shared_ptr<C2LinearAllocation>* allocation) {
+ if (allocation == nullptr) {
+ return C2_BAD_VALUE;
+ }
+
+ allocation->reset();
+ if (mInit != C2_OK) {
+ return mInit;
+ }
+
+ C2String heap_name;
+ unsigned flags = 0;
+ c2_status_t ret = mapUsage(usage, capacity, &heap_name, &flags);
+ if (ret && ret != C2_NO_INIT) {
+ return ret;
+ }
+
+ std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
+ mBufferAllocator, capacity, heap_name, flags, getId());
+ ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ }
+ return ret;
+}
+
+c2_status_t C2DmaBufAllocator::priorLinearAllocation(
+ const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) {
+ *allocation = nullptr;
+ if (mInit != C2_OK) {
+ return mInit;
+ }
+
+ if (!C2HandleBuf::IsValid(handle)) {
+ return C2_BAD_VALUE;
+ }
+
+ // TODO: get capacity and validate it
+ const C2HandleBuf* h = static_cast<const C2HandleBuf*>(handle);
+ std::shared_ptr<C2DmaBufAllocation> alloc =
+ std::make_shared<C2DmaBufAllocation>(h->size(), h->bufferFd(), getId());
+ c2_status_t ret = alloc->status();
+ if (ret == C2_OK) {
+ *allocation = alloc;
+ native_handle_delete(
+ const_cast<native_handle_t*>(reinterpret_cast<const native_handle_t*>(handle)));
+ }
+ return ret;
+}
+
+// static
+bool C2DmaBufAllocator::CheckHandle(const C2Handle* const o) {
+ return C2HandleBuf::IsValid(o);
+}
+
+} // namespace android
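A minimal usage sketch (illustrative only) of how a client would reach this allocator through the platform store, which routes the ION id to the DMA-BUF path when /dev/ion is absent (see the C2Store.cpp changes below):

    // #include <C2PlatformSupport.h>
    std::shared_ptr<C2Allocator> allocator;
    c2_status_t res = GetCodec2PlatformAllocatorStore()->fetchAllocator(
            C2PlatformAllocatorStore::DMABUFHEAP /* same id as ION */, &allocator);
    if (res == C2_OK) {
        C2MemoryUsage usage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE);
        std::shared_ptr<C2LinearAllocation> allocation;
        if (allocator->newLinearAllocation(4096, usage, &allocation) == C2_OK) {
            void* addr = nullptr;
            // Map the whole buffer for CPU access; this allocator does not use fences.
            if (allocation->map(0 /* offset */, 4096, usage, nullptr /* fence */, &addr) == C2_OK) {
                // ... read or write the mapped memory ...
                allocation->unmap(addr, 4096, nullptr);
            }
        }
    }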
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index d16527e..1e907c1 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -21,6 +21,7 @@
#include <C2AllocatorBlob.h>
#include <C2AllocatorGralloc.h>
#include <C2AllocatorIon.h>
+#include <C2DmaBufAllocator.h>
#include <C2BufferPriv.h>
#include <C2BqBufferPriv.h>
#include <C2Component.h>
@@ -82,6 +83,7 @@
/// returns a shared-singleton ion allocator
std::shared_ptr<C2Allocator> fetchIonAllocator();
+ std::shared_ptr<C2Allocator> fetchDmaBufAllocator();
/// returns a shared-singleton gralloc allocator
std::shared_ptr<C2Allocator> fetchGrallocAllocator();
@@ -99,6 +101,20 @@
C2PlatformAllocatorStoreImpl::C2PlatformAllocatorStoreImpl() {
}
+static bool using_ion(void) {
+ static int cached_result = -1;
+
+ if (cached_result == -1) {
+ struct stat buffer;
+ cached_result = (stat("/dev/ion", &buffer) == 0);
+ if (cached_result)
+ ALOGD("Using ION\n");
+ else
+ ALOGD("Using DMABUF Heaps\n");
+ }
+ return (cached_result == 1);
+}
+
c2_status_t C2PlatformAllocatorStoreImpl::fetchAllocator(
id_t id, std::shared_ptr<C2Allocator> *const allocator) {
allocator->reset();
@@ -107,8 +123,11 @@
}
switch (id) {
// TODO: should we implement a generic registry for all, and use that?
- case C2PlatformAllocatorStore::ION:
- *allocator = fetchIonAllocator();
+ case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
+ if (using_ion())
+ *allocator = fetchIonAllocator();
+ else
+ *allocator = fetchDmaBufAllocator();
break;
case C2PlatformAllocatorStore::GRALLOC:
@@ -142,7 +161,9 @@
namespace {
std::mutex gIonAllocatorMutex;
+std::mutex gDmaBufAllocatorMutex;
std::weak_ptr<C2AllocatorIon> gIonAllocator;
+std::weak_ptr<C2DmaBufAllocator> gDmaBufAllocator;
void UseComponentStoreForIonAllocator(
const std::shared_ptr<C2AllocatorIon> allocator,
@@ -197,6 +218,65 @@
allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
}
+void UseComponentStoreForDmaBufAllocator(const std::shared_ptr<C2DmaBufAllocator> allocator,
+ std::shared_ptr<C2ComponentStore> store) {
+ C2DmaBufAllocator::UsageMapperFn mapper;
+ const size_t maxHeapNameLen = 128;
+ uint64_t minUsage = 0;
+ uint64_t maxUsage = C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected;
+ size_t blockSize = getpagesize();
+
+ // query min and max usage as well as block size via supported values
+ std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+ usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen);
+
+ std::vector<C2FieldSupportedValuesQuery> query = {
+ C2FieldSupportedValuesQuery::Possible(C2ParamField::Make(*usageInfo, usageInfo->m.usage)),
+ C2FieldSupportedValuesQuery::Possible(
+ C2ParamField::Make(*usageInfo, usageInfo->m.capacity)),
+ };
+ c2_status_t res = store->querySupportedValues_sm(query);
+ if (res == C2_OK) {
+ if (query[0].status == C2_OK) {
+ const C2FieldSupportedValues& fsv = query[0].values;
+ if (fsv.type == C2FieldSupportedValues::FLAGS && !fsv.values.empty()) {
+ minUsage = fsv.values[0].u64;
+ maxUsage = 0;
+ for (C2Value::Primitive v : fsv.values) {
+ maxUsage |= v.u64;
+ }
+ }
+ }
+ if (query[1].status == C2_OK) {
+ const C2FieldSupportedValues& fsv = query[1].values;
+ if (fsv.type == C2FieldSupportedValues::RANGE && fsv.range.step.u32 > 0) {
+ blockSize = fsv.range.step.u32;
+ }
+ }
+
+ mapper = [store](C2MemoryUsage usage, size_t capacity, C2String* heapName,
+ unsigned* flags) -> c2_status_t {
+ if (capacity > UINT32_MAX) {
+ return C2_BAD_VALUE;
+ }
+
+ std::unique_ptr<C2StoreDmaBufUsageInfo> usageInfo;
+ usageInfo = C2StoreDmaBufUsageInfo::AllocUnique(maxHeapNameLen, usage.expected, capacity);
+ std::vector<std::unique_ptr<C2SettingResult>> failures; // TODO: remove
+
+ c2_status_t res = store->config_sm({&*usageInfo}, &failures);
+ if (res == C2_OK) {
+ *heapName = C2String(usageInfo->m.heapName);
+ *flags = usageInfo->m.allocFlags;
+ }
+
+ return res;
+ };
+ }
+
+ allocator->setUsageMapper(mapper, minUsage, maxUsage, blockSize);
+}
+
}
void C2PlatformAllocatorStoreImpl::setComponentStore(std::shared_ptr<C2ComponentStore> store) {
@@ -233,6 +313,22 @@
return allocator;
}
+std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchDmaBufAllocator() {
+ std::lock_guard<std::mutex> lock(gDmaBufAllocatorMutex);
+ std::shared_ptr<C2DmaBufAllocator> allocator = gDmaBufAllocator.lock();
+ if (allocator == nullptr) {
+ std::shared_ptr<C2ComponentStore> componentStore;
+ {
+ std::lock_guard<std::mutex> lock(_mComponentStoreReadLock);
+ componentStore = _mComponentStore;
+ }
+ allocator = std::make_shared<C2DmaBufAllocator>(C2PlatformAllocatorStore::DMABUFHEAP);
+ UseComponentStoreForDmaBufAllocator(allocator, componentStore);
+ gDmaBufAllocator = allocator;
+ }
+ return allocator;
+}
+
std::shared_ptr<C2Allocator> C2PlatformAllocatorStoreImpl::fetchBlobAllocator() {
static std::mutex mutex;
static std::weak_ptr<C2Allocator> blobAllocator;
@@ -347,7 +443,7 @@
allocatorId = GetPreferredLinearAllocatorId(GetCodec2PoolMask());
}
switch(allocatorId) {
- case C2PlatformAllocatorStore::ION:
+ case C2PlatformAllocatorStore::ION: /* also ::DMABUFHEAP */
res = allocatorStore->fetchAllocator(
C2PlatformAllocatorStore::ION, &allocator);
if (res == C2_OK) {
@@ -645,6 +741,7 @@
struct Interface : public C2InterfaceHelper {
std::shared_ptr<C2StoreIonUsageInfo> mIonUsageInfo;
+ std::shared_ptr<C2StoreDmaBufUsageInfo> mDmaBufUsageInfo;
Interface(std::shared_ptr<C2ReflectorHelper> reflector)
: C2InterfaceHelper(reflector) {
@@ -680,7 +777,13 @@
me.set().minAlignment = 0;
#endif
return C2R::Ok();
- }
+ };
+
+ static C2R setDmaBufUsage(bool /* mayBlock */, C2P<C2StoreDmaBufUsageInfo> &me) {
+ strncpy(me.set().m.heapName, "system", me.v.flexCount());
+ me.set().m.allocFlags = 0;
+ return C2R::Ok();
+ };
};
addParameter(
@@ -695,6 +798,18 @@
})
.withSetter(Setter::setIonUsage)
.build());
+
+ addParameter(
+ DefineParam(mDmaBufUsageInfo, "dmabuf-usage")
+ .withDefault(C2StoreDmaBufUsageInfo::AllocShared(0))
+ .withFields({
+ C2F(mDmaBufUsageInfo, m.usage).flags({C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE}),
+ C2F(mDmaBufUsageInfo, m.capacity).inRange(0, UINT32_MAX, 1024),
+ C2F(mDmaBufUsageInfo, m.allocFlags).flags({}),
+ C2F(mDmaBufUsageInfo, m.heapName).any(),
+ })
+ .withSetter(Setter::setDmaBufUsage)
+ .build());
}
};
diff --git a/media/codec2/vndk/include/C2DmaBufAllocator.h b/media/codec2/vndk/include/C2DmaBufAllocator.h
new file mode 100644
index 0000000..abb8307
--- /dev/null
+++ b/media/codec2/vndk/include/C2DmaBufAllocator.h
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+#define STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
+
+#include <BufferAllocator/BufferAllocator.h>
+#include <C2Buffer.h>
+#include <sys/stat.h> // stat
+
+#include <functional>
+#include <list>
+#include <mutex>
+#include <tuple>
+#include <unordered_map>
+
+namespace android {
+
+class C2DmaBufAllocator : public C2Allocator {
+ public:
+ virtual c2_status_t newLinearAllocation(
+ uint32_t capacity, C2MemoryUsage usage,
+ std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+ virtual c2_status_t priorLinearAllocation(
+ const C2Handle* handle, std::shared_ptr<C2LinearAllocation>* allocation) override;
+
+ C2DmaBufAllocator(id_t id);
+
+ virtual c2_status_t status() const { return mInit; }
+
+ virtual bool checkHandle(const C2Handle* const o) const override { return CheckHandle(o); }
+
+ static bool CheckHandle(const C2Handle* const o);
+
+ virtual id_t getId() const override;
+
+ virtual C2String getName() const override;
+
+ virtual std::shared_ptr<const Traits> getTraits() const override;
+
+ // Usage mapper function used by the allocator
+ // (usage, capacity) => (heapName, flags)
+ //
+ // capacity is aligned to the default block-size (defaults to page size) to
+ // reduce caching overhead
+ typedef std::function<c2_status_t(C2MemoryUsage, size_t,
+ /* => */ C2String*, unsigned*)>
+ UsageMapperFn;
+
+ /**
+ * Updates the usage mapper for subsequent new allocations, as well as the
+ * supported minimum and maximum usage masks and default block-size to use
+ * for the mapper.
+ *
+ * \param mapper This method is called to map Codec 2.0 buffer usage
+ * to the DMA-BUF heap name and flags required by the
+ * DMA-BUF heap device
+ *
+ * \param minUsage Minimum buffer usage required for supported
+ * allocations (defaults to 0)
+ *
+ * \param maxUsage Maximum buffer usage supported by the DMA-BUF allocator
+ * (defaults to SW_READ | SW_WRITE)
+ *
+ * \param blockSize Alignment used prior to calling |mapper| for the
+ * buffer capacity. This also helps reduce the size of
+ * cache required for caching mapper results.
+ * (defaults to the page size)
+ */
+ void setUsageMapper(const UsageMapperFn& mapper, uint64_t minUsage, uint64_t maxUsage,
+ uint64_t blockSize);
+
+ private:
+ c2_status_t mInit;
+ BufferAllocator mBufferAllocator;
+
+ c2_status_t mapUsage(C2MemoryUsage usage, size_t size,
+ /* => */ C2String* heap_name, unsigned* flags);
+
+ // this locks mTraits, mBlockSize, mUsageMapper, mUsageMapperLru and
+ // mUsageMapperCache
+ mutable std::mutex mUsageMapperLock;
+ std::shared_ptr<const Traits> mTraits;
+ size_t mBlockSize;
+ UsageMapperFn mUsageMapper;
+ typedef std::pair<uint64_t, size_t> MapperKey;
+ struct MapperKeyHash {
+ std::size_t operator()(const MapperKey&) const;
+ };
+ typedef std::tuple<C2String, unsigned, c2_status_t> MapperValue;
+ typedef std::pair<MapperKey, MapperValue> MapperKeyValue;
+ typedef std::list<MapperKeyValue>::iterator MapperKeyValuePointer;
+ std::list<MapperKeyValue> mUsageMapperLru;
+ std::unordered_map<MapperKey, MapperKeyValuePointer, MapperKeyHash> mUsageMapperCache;
+};
+} // namespace android
+
+#endif // STAGEFRIGHT_CODEC2_ALLOCATOR_BUF_H_
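To illustrate the mapper contract documented above, a sketch of a trivial UsageMapperFn that a store could install (illustrative only; the real mapper wired up in C2Store.cpp consults the component store through C2StoreDmaBufUsageInfo):

    // #include <unistd.h>  // getpagesize
    // Given: std::shared_ptr<C2DmaBufAllocator> allocator;
    C2DmaBufAllocator::UsageMapperFn mapper =
            [](C2MemoryUsage usage, size_t capacity, C2String* heapName, unsigned* flags) -> c2_status_t {
        (void)usage; (void)capacity;        // a real mapper would pick a heap per usage/capacity
        *heapName = "system";               // default DMA-BUF heap
        *flags = 0;
        return C2_OK;
    };
    allocator->setUsageMapper(mapper, 0 /* minUsage */,
                              C2MemoryUsage(C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE).expected,
                              getpagesize() /* blockSize */);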
diff --git a/media/codec2/vndk/include/C2PlatformSupport.h b/media/codec2/vndk/include/C2PlatformSupport.h
index a14e0d3..4814494 100644
--- a/media/codec2/vndk/include/C2PlatformSupport.h
+++ b/media/codec2/vndk/include/C2PlatformSupport.h
@@ -47,6 +47,17 @@
*/
ION = PLATFORM_START,
+ /*
+ * ID of the DMA-Buf Heap (ion replacement) backed platform allocator.
+ *
+ * C2Handle consists of:
+ * fd shared dmabuf buffer handle
+ * int size (lo 32 bits)
+ * int size (hi 32 bits)
+ * int magic '\xc2io\x00'
+ */
+ DMABUFHEAP = ION,
+
/**
* ID of the gralloc backed platform allocator.
*
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index 94f10e5..c2dcd35 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -75,6 +75,7 @@
}
AudioStreamInternal::~AudioStreamInternal() {
+ ALOGD("%s() %p called", __func__, this);
}
aaudio_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
@@ -270,21 +271,21 @@
return result;
error:
- releaseCloseFinal();
+ safeReleaseClose();
return result;
}
// This must be called under mStreamLock.
aaudio_result_t AudioStreamInternal::release_l() {
aaudio_result_t result = AAUDIO_OK;
- ALOGV("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
+ ALOGD("%s(): mServiceStreamHandle = 0x%08X", __func__, mServiceStreamHandle);
if (mServiceStreamHandle != AAUDIO_HANDLE_INVALID) {
aaudio_stream_state_t currentState = getState();
// Don't release a stream while it is running. Stop it first.
// If DISCONNECTED then we should still try to stop in case the
// error callback is still running.
if (isActive() || currentState == AAUDIO_STREAM_STATE_DISCONNECTED) {
- requestStop();
+ requestStop_l();
}
logReleaseBufferState();
@@ -330,7 +331,7 @@
* The processing code will then save the current offset
* between client and server and apply that to any position given to the app.
*/
-aaudio_result_t AudioStreamInternal::requestStart()
+aaudio_result_t AudioStreamInternal::requestStart_l()
{
int64_t startTime;
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
@@ -373,7 +374,7 @@
* AAUDIO_NANOS_PER_SECOND
/ getSampleRate();
mCallbackEnabled.store(true);
- result = createThread(periodNanos, aaudio_callback_thread_proc, this);
+ result = createThread_l(periodNanos, aaudio_callback_thread_proc, this);
}
if (result != AAUDIO_OK) {
setState(originalState);
@@ -399,26 +400,29 @@
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::stopCallback()
+aaudio_result_t AudioStreamInternal::stopCallback_l()
{
if (isDataCallbackSet()
&& (isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
mCallbackEnabled.store(false);
- aaudio_result_t result = joinThread(NULL); // may temporarily unlock mStreamLock
+ aaudio_result_t result = joinThread_l(NULL); // may temporarily unlock mStreamLock
if (result == AAUDIO_ERROR_INVALID_HANDLE) {
ALOGD("%s() INVALID_HANDLE, stream was probably stolen", __func__);
result = AAUDIO_OK;
}
return result;
} else {
+ ALOGD("%s() skipped, isDataCallbackSet() = %d, isActive() = %d, getState() = %d", __func__,
+ isDataCallbackSet(), isActive(), getState());
return AAUDIO_OK;
}
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternal::requestStop() {
- aaudio_result_t result = stopCallback();
+aaudio_result_t AudioStreamInternal::requestStop_l() {
+ aaudio_result_t result = stopCallback_l();
if (result != AAUDIO_OK) {
+ ALOGW("%s() stop callback returned %d, returning early", __func__, result);
return result;
}
// The stream may have been unlocked temporarily to let a callback finish
@@ -426,6 +430,7 @@
// Check to make sure the stream still needs to be stopped.
// See also AudioStream::safeStop().
if (!(isActive() || getState() == AAUDIO_STREAM_STATE_DISCONNECTED)) {
+ ALOGD("%s() returning early, not active or disconnected", __func__);
return AAUDIO_OK;
}
@@ -805,11 +810,15 @@
return mBufferCapacityInFrames;
}
-// This must be called under mStreamLock.
aaudio_result_t AudioStreamInternal::joinThread(void** returnArg) {
return AudioStream::joinThread(returnArg, calculateReasonableTimeout(getFramesPerBurst()));
}
+// This must be called under mStreamLock.
+aaudio_result_t AudioStreamInternal::joinThread_l(void** returnArg) {
+ return AudioStream::joinThread_l(returnArg, calculateReasonableTimeout(getFramesPerBurst()));
+}
+
bool AudioStreamInternal::isClockModelInControl() const {
return isActive() && mAudioEndpoint->isFreeRunning() && mClockModel.isRunning();
}
diff --git a/media/libaaudio/src/client/AudioStreamInternal.h b/media/libaaudio/src/client/AudioStreamInternal.h
index d7024cf..1838b53 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.h
+++ b/media/libaaudio/src/client/AudioStreamInternal.h
@@ -44,9 +44,9 @@
AudioStreamInternal(AAudioServiceInterface &serviceInterface, bool inService);
virtual ~AudioStreamInternal();
- aaudio_result_t requestStart() override;
+ aaudio_result_t requestStart_l() override;
- aaudio_result_t requestStop() override;
+ aaudio_result_t requestStop_l() override;
aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
@@ -117,7 +117,9 @@
aaudio_result_t processCommands();
- aaudio_result_t stopCallback();
+ aaudio_result_t stopCallback_l();
+
+ aaudio_result_t joinThread_l(void** returnArg);
virtual void prepareBuffersForStart() {}
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
index 980592c..b81e5e4 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.cpp
@@ -56,7 +56,7 @@
getDeviceChannelCount());
if (result != AAUDIO_OK) {
- releaseCloseFinal();
+ safeReleaseClose();
}
// Sample rate is constrained to common values by now and should not overflow.
int32_t numFrames = kRampMSec * getSampleRate() / AAUDIO_MILLIS_PER_SECOND;
@@ -66,9 +66,9 @@
}
// This must be called under mStreamLock.
-aaudio_result_t AudioStreamInternalPlay::requestPause()
+aaudio_result_t AudioStreamInternalPlay::requestPause_l()
{
- aaudio_result_t result = stopCallback();
+ aaudio_result_t result = stopCallback_l();
if (result != AAUDIO_OK) {
return result;
}
@@ -83,7 +83,7 @@
return mServiceInterface.pauseStream(mServiceStreamHandle);
}
-aaudio_result_t AudioStreamInternalPlay::requestFlush() {
+aaudio_result_t AudioStreamInternalPlay::requestFlush_l() {
if (mServiceStreamHandle == AAUDIO_HANDLE_INVALID) {
ALOGW("%s() mServiceStreamHandle invalid", __func__);
return AAUDIO_ERROR_INVALID_STATE;
diff --git a/media/libaaudio/src/client/AudioStreamInternalPlay.h b/media/libaaudio/src/client/AudioStreamInternalPlay.h
index 7b1cddc..03c957d 100644
--- a/media/libaaudio/src/client/AudioStreamInternalPlay.h
+++ b/media/libaaudio/src/client/AudioStreamInternalPlay.h
@@ -35,9 +35,9 @@
aaudio_result_t open(const AudioStreamBuilder &builder) override;
- aaudio_result_t requestPause() override;
+ aaudio_result_t requestPause_l() override;
- aaudio_result_t requestFlush() override;
+ aaudio_result_t requestFlush_l() override;
bool isFlushSupported() const override {
// Only implement FLUSH for OUTPUT streams.
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 310ffbe..ba86170 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -42,16 +42,20 @@
: mPlayerBase(new MyPlayerBase())
, mStreamId(AAudio_getNextStreamId())
{
- // mThread is a pthread_t of unknown size so we need memset.
- memset(&mThread, 0, sizeof(mThread));
setPeriodNanoseconds(0);
}
AudioStream::~AudioStream() {
- // Please preserve this log because there have been several bugs related to
+ // Please preserve these logs because there have been several bugs related to
// AudioStream deletion and late callbacks.
ALOGD("%s(s#%u) mPlayerBase strongCount = %d",
__func__, getId(), mPlayerBase->getStrongCount());
+
+ ALOGE_IF(pthread_equal(pthread_self(), mThread),
+ "%s() destructor running in callback", __func__);
+
+ ALOGE_IF(mHasThread, "%s() callback thread never join()ed", __func__);
+
// If the stream is deleted when OPEN or in use then audio resources will leak.
// This would indicate an internal error. So we want to find this ASAP.
LOG_ALWAYS_FATAL_IF(!(getState() == AAUDIO_STREAM_STATE_CLOSED
@@ -164,7 +168,7 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = requestStart();
+ aaudio_result_t result = requestStart_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->start();
@@ -214,7 +218,7 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- aaudio_result_t result = requestPause();
+ aaudio_result_t result = requestPause_l();
if (result == AAUDIO_OK) {
// We only call this for logging in "dumpsys audio". So ignore return code.
(void) mPlayerBase->pause();
@@ -239,7 +243,7 @@
return result;
}
- return requestFlush();
+ return requestFlush_l();
}
aaudio_result_t AudioStream::systemStopFromCallback() {
@@ -299,11 +303,11 @@
return AAUDIO_ERROR_INVALID_STATE;
}
- return requestStop();
+ return requestStop_l();
}
aaudio_result_t AudioStream::safeRelease() {
- // This get temporarily unlocked in the MMAP release() when joining callback threads.
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
std::lock_guard<std::mutex> lock(mStreamLock);
if (collidesWithCallback()) {
ALOGE("%s cannot be called from a callback!", __func__);
@@ -322,7 +326,14 @@
ALOGE("%s cannot be called from a callback!", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
- releaseCloseFinal();
+ releaseCloseFinal_l();
+ return AAUDIO_OK;
+}
+
+aaudio_result_t AudioStream::safeReleaseCloseFromCallback() {
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ releaseCloseFinal_l();
return AAUDIO_OK;
}
@@ -403,23 +414,28 @@
return procResult;
}
-// This is the entry point for the new thread created by createThread().
+
+// This is the entry point for the new thread created by createThread_l().
// It converts the 'C' function call to a C++ method call.
static void* AudioStream_internalThreadProc(void* threadArg) {
AudioStream *audioStream = (AudioStream *) threadArg;
- // Use an sp<> to prevent the stream from being deleted while running.
+ // Prevent the stream from being deleted while being used.
+ // This is just for extra safety. It is probably not needed because
+ // this callback should be joined before the stream is closed.
android::sp<AudioStream> protectedStream(audioStream);
+ // Balance the incStrong() in createThread_l().
+ protectedStream->decStrong(nullptr);
return protectedStream->wrapUserThread();
}
// This is not exposed in the API.
// But it is still used internally to implement callbacks for MMAP mode.
-aaudio_result_t AudioStream::createThread(int64_t periodNanoseconds,
- aaudio_audio_thread_proc_t threadProc,
- void* threadArg)
+aaudio_result_t AudioStream::createThread_l(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void* threadArg)
{
if (mHasThread) {
- ALOGE("createThread() - mHasThread already true");
+ ALOGE("%s() - mHasThread already true", __func__);
return AAUDIO_ERROR_INVALID_STATE;
}
if (threadProc == nullptr) {
@@ -429,10 +445,14 @@
mThreadProc = threadProc;
mThreadArg = threadArg;
setPeriodNanoseconds(periodNanoseconds);
+ // Prevent this object from getting deleted before the thread has a chance to create
+ // its strong pointer. Assume the thread will call decStrong().
+ this->incStrong(nullptr);
int err = pthread_create(&mThread, nullptr, AudioStream_internalThreadProc, this);
if (err != 0) {
android::status_t status = -errno;
- ALOGE("createThread() - pthread_create() failed, %d", status);
+ ALOGE("%s() - pthread_create() failed, %d", __func__, status);
+ this->decStrong(nullptr); // Because the thread won't do it.
return AAudioConvert_androidToAAudioResult(status);
} else {
// TODO Use AAudioThread or maybe AndroidThread
@@ -452,17 +472,23 @@
}
}
+aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds) {
+ // This may get temporarily unlocked in the MMAP release() when joining callback threads.
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ return joinThread_l(returnArg, timeoutNanoseconds);
+}
+
// This must be called under mStreamLock.
-aaudio_result_t AudioStream::joinThread(void** returnArg, int64_t timeoutNanoseconds __unused)
+aaudio_result_t AudioStream::joinThread_l(void** returnArg, int64_t /* timeoutNanoseconds */)
{
if (!mHasThread) {
- ALOGE("joinThread() - but has no thread");
+ ALOGD("joinThread() - but has no thread");
return AAUDIO_ERROR_INVALID_STATE;
}
aaudio_result_t result = AAUDIO_OK;
// If the callback is stopping the stream because the app passed back STOP
// then we don't need to join(). The thread is already about to exit.
- if (pthread_self() != mThread) {
+ if (!pthread_equal(pthread_self(), mThread)) {
// Called from an app thread. Not the callback.
// Unlock because the callback may be trying to stop the stream but is blocked.
mStreamLock.unlock();
@@ -477,11 +503,15 @@
if (err) {
ALOGE("%s() pthread_join() returns err = %d", __func__, err);
result = AAudioConvert_androidToAAudioResult(-err);
+ } else {
+ ALOGD("%s() pthread_join succeeded", __func__);
+ // This must be set false so that the callback thread can be created
+ // when the stream is restarted.
+ mHasThread = false;
}
+ } else {
+ ALOGD("%s() pthread_join() called on itself!", __func__);
}
- // This must be set false so that the callback thread can be created
- // when the stream is restarted.
- mHasThread = false;
return (result != AAUDIO_OK) ? result : mThreadRegistrationResult;
}
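The *_l renames in this file all follow one convention: the public method acquires mStreamLock and then delegates to an _l counterpart that assumes the lock is already held, exactly as joinThread()/joinThread_l() above. A self-contained, hypothetical sketch of that shape (not an actual AudioStream method):

    #include <mutex>
    struct Stream {
        std::mutex mStreamLock;
        int requestFoo() {                                   // public entry point takes the stream lock
            std::lock_guard<std::mutex> lock(mStreamLock);
            return requestFoo_l();
        }
        int requestFoo_l() { /* assumes mStreamLock is held */ return 0; }
    };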
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index e438477..d9a9d8e 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -60,7 +60,7 @@
/* Asynchronous requests.
* Use waitForStateChange() to wait for completion.
*/
- virtual aaudio_result_t requestStart() = 0;
+ virtual aaudio_result_t requestStart_l() = 0;
/**
* Check the state to see if Pause is currently legal.
@@ -80,18 +80,17 @@
return false;
}
- virtual aaudio_result_t requestPause()
- {
+ virtual aaudio_result_t requestPause_l() {
// Only implement this for OUTPUT streams.
return AAUDIO_ERROR_UNIMPLEMENTED;
}
- virtual aaudio_result_t requestFlush() {
+ virtual aaudio_result_t requestFlush_l() {
// Only implement this for OUTPUT streams.
return AAUDIO_ERROR_UNIMPLEMENTED;
}
- virtual aaudio_result_t requestStop() = 0;
+ virtual aaudio_result_t requestStop_l() = 0;
public:
virtual aaudio_result_t getTimestamp(clockid_t clockId,
@@ -152,17 +151,6 @@
setState(AAUDIO_STREAM_STATE_CLOSED);
}
- /**
- * Release then close the stream.
- */
- void releaseCloseFinal() {
- if (getState() != AAUDIO_STREAM_STATE_CLOSING) { // not already released?
- // Ignore result and keep closing.
- (void) release_l();
- }
- close_l();
- }
-
// This is only used to identify a stream in the logs without
// revealing any pointers.
aaudio_stream_id_t getId() {
@@ -171,9 +159,9 @@
virtual aaudio_result_t setBufferSize(int32_t requestedFrames) = 0;
- virtual aaudio_result_t createThread(int64_t periodNanoseconds,
- aaudio_audio_thread_proc_t threadProc,
- void *threadArg);
+ virtual aaudio_result_t createThread_l(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void *threadArg);
aaudio_result_t joinThread(void **returnArg, int64_t timeoutNanoseconds);
@@ -432,6 +420,8 @@
*/
aaudio_result_t safeReleaseClose();
+ aaudio_result_t safeReleaseCloseFromCallback();
+
protected:
// PlayerBase allows the system to control the stream volume.
@@ -543,6 +533,8 @@
mSessionId = sessionId;
}
+ aaudio_result_t joinThread_l(void **returnArg, int64_t timeoutNanoseconds);
+
std::atomic<bool> mCallbackEnabled{false};
float mDuckAndMuteVolume = 1.0f;
@@ -613,6 +605,17 @@
aaudio_result_t safeStop();
+ /**
+ * Release then close the stream.
+ */
+ void releaseCloseFinal_l() {
+ if (getState() != AAUDIO_STREAM_STATE_CLOSING) { // not already released?
+ // Ignore result and keep closing.
+ (void) release_l();
+ }
+ close_l();
+ }
+
std::mutex mStreamLock;
const android::sp<MyPlayerBase> mPlayerBase;
@@ -654,7 +657,7 @@
// background thread ----------------------------------
bool mHasThread = false;
- pthread_t mThread; // initialized in constructor
+ pthread_t mThread = {};
// These are set by the application thread and then read by the audio pthread.
std::atomic<int64_t> mPeriodNanoseconds; // for tuning SCHED_FIFO threads
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 33c1bf5..fdaa2ab 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -226,7 +226,7 @@
ALOGD("%s() request DISCONNECT in data callback, device %d => %d",
__func__, (int) getDeviceId(), (int) deviceId);
// If the stream is stopped before the data callback has a chance to handle the
- // request then the requestStop() and requestPause() methods will handle it after
+ // request then the requestStop_l() and requestPause_l() methods will handle it after
// the callback has stopped.
mRequestDisconnect.request();
} else {
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.cpp b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
index d46ef56..45b2258 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.cpp
@@ -185,7 +185,7 @@
// Did we get a valid track?
status_t status = mAudioRecord->initCheck();
if (status != OK) {
- releaseCloseFinal();
+ safeReleaseClose();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -341,7 +341,7 @@
return;
}
-aaudio_result_t AudioStreamRecord::requestStart()
+aaudio_result_t AudioStreamRecord::requestStart_l()
{
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
@@ -365,7 +365,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamRecord::requestStop() {
+aaudio_result_t AudioStreamRecord::requestStop_l() {
if (mAudioRecord.get() == nullptr) {
return AAUDIO_ERROR_INVALID_STATE;
}
diff --git a/media/libaaudio/src/legacy/AudioStreamRecord.h b/media/libaaudio/src/legacy/AudioStreamRecord.h
index ad8dfe4..fe9689f 100644
--- a/media/libaaudio/src/legacy/AudioStreamRecord.h
+++ b/media/libaaudio/src/legacy/AudioStreamRecord.h
@@ -41,8 +41,8 @@
aaudio_result_t release_l() override;
void close_l() override;
- aaudio_result_t requestStart() override;
- aaudio_result_t requestStop() override;
+ aaudio_result_t requestStart_l() override;
+ aaudio_result_t requestStop_l() override;
virtual aaudio_result_t getTimestamp(clockid_t clockId,
int64_t *framePosition,
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.cpp b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
index 307904e..1d036d0 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.cpp
@@ -178,7 +178,7 @@
// Did we get a valid track?
status_t status = mAudioTrack->initCheck();
if (status != NO_ERROR) {
- releaseCloseFinal();
+ safeReleaseClose();
ALOGE("open(), initCheck() returned %d", status);
return AAudioConvert_androidToAAudioResult(status);
}
@@ -293,7 +293,7 @@
return;
}
-aaudio_result_t AudioStreamTrack::requestStart() {
+aaudio_result_t AudioStreamTrack::requestStart_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("requestStart() no AudioTrack");
return AAUDIO_ERROR_INVALID_STATE;
@@ -320,7 +320,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::requestPause() {
+aaudio_result_t AudioStreamTrack::requestPause_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -336,7 +336,7 @@
return checkForDisconnectRequest(false);
}
-aaudio_result_t AudioStreamTrack::requestFlush() {
+aaudio_result_t AudioStreamTrack::requestFlush_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
@@ -350,7 +350,7 @@
return AAUDIO_OK;
}
-aaudio_result_t AudioStreamTrack::requestStop() {
+aaudio_result_t AudioStreamTrack::requestStop_l() {
if (mAudioTrack.get() == nullptr) {
ALOGE("%s() no AudioTrack", __func__);
return AAUDIO_ERROR_INVALID_STATE;
diff --git a/media/libaaudio/src/legacy/AudioStreamTrack.h b/media/libaaudio/src/legacy/AudioStreamTrack.h
index 5a8fb39..654ea9b 100644
--- a/media/libaaudio/src/legacy/AudioStreamTrack.h
+++ b/media/libaaudio/src/legacy/AudioStreamTrack.h
@@ -44,10 +44,10 @@
aaudio_result_t release_l() override;
void close_l() override;
- aaudio_result_t requestStart() override;
- aaudio_result_t requestPause() override;
- aaudio_result_t requestFlush() override;
- aaudio_result_t requestStop() override;
+ aaudio_result_t requestStart_l() override;
+ aaudio_result_t requestPause_l() override;
+ aaudio_result_t requestFlush_l() override;
+ aaudio_result_t requestStop_l() override;
bool isFlushSupported() const override {
// Only implement FLUSH for OUTPUT streams.
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index dbb3d2b..3dfb801 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -27,7 +27,7 @@
#include "core/AudioGlobal.h"
#include <aaudio/AAudioTesting.h>
#include <math.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
#include <assert.h>
#include "utility/AAudioUtilities.h"
diff --git a/media/libaaudio/tests/test_stop_hang.cpp b/media/libaaudio/tests/test_stop_hang.cpp
index 2397b6c..982ff4a 100644
--- a/media/libaaudio/tests/test_stop_hang.cpp
+++ b/media/libaaudio/tests/test_stop_hang.cpp
@@ -45,7 +45,7 @@
{
// Will block if the thread is running.
// This mutex is used to close() immediately after the callback returns
- // and before the requestStop() is called.
+ // and before the requestStop_l() is called.
std::lock_guard<std::mutex> lock(doneLock);
if (done) break;
}
diff --git a/media/libaudioclient/AidlConversion.cpp b/media/libaudioclient/AidlConversion.cpp
new file mode 100644
index 0000000..95a6a4a
--- /dev/null
+++ b/media/libaudioclient/AidlConversion.cpp
@@ -0,0 +1,1223 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <limits>
+
+#define LOG_TAG "AidlConversion"
+//#define LOG_NDEBUG 0
+#include <system/audio.h>
+#include <utils/Log.h>
+
+#include "media/AidlConversion.h"
+
+#define VALUE_OR_RETURN(result) \
+ ({ \
+ auto _tmp = (result); \
+ if (!_tmp.ok()) return unexpected(_tmp.error()); \
+ _tmp.value(); \
+ })
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities
+
+namespace android {
+
+using base::unexpected;
+
+namespace {
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// The code below establishes:
+// IntegralTypeOf<T>, which works for either integral types (in which case it evaluates to T), or
+// enum types (in which case it evaluates to std::underlying_type_t<T>).
+
+template<typename T, typename = std::enable_if_t<std::is_integral_v<T> || std::is_enum_v<T>>>
+struct IntegralTypeOfStruct {
+ using Type = T;
+};
+
+template<typename T>
+struct IntegralTypeOfStruct<T, std::enable_if_t<std::is_enum_v<T>>> {
+ using Type = std::underlying_type_t<T>;
+};
+
+template<typename T>
+using IntegralTypeOf = typename IntegralTypeOfStruct<T>::Type;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Utilities for handling bitmasks.
+
+template<typename Enum>
+Enum index2enum_index(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(index);
+}
+
+template<typename Enum>
+Enum index2enum_bitmask(int index) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ return static_cast<Enum>(1 << index);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_bitmask(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(e);
+}
+
+template<typename Mask, typename Enum>
+Mask enumToMask_index(Enum e) {
+ static_assert(std::is_enum_v<Enum> || std::is_integral_v<Enum>);
+ static_assert(std::is_enum_v<Mask> || std::is_integral_v<Mask>);
+ return static_cast<Mask>(static_cast<std::make_unsigned_t<IntegralTypeOf<Mask>>>(1)
+ << static_cast<int>(e));
+}
+
+template<typename DestMask, typename SrcMask, typename DestEnum, typename SrcEnum>
+ConversionResult<DestMask> convertBitmask(
+ SrcMask src, const std::function<ConversionResult<DestEnum>(SrcEnum)>& enumConversion,
+ const std::function<SrcEnum(int)>& srcIndexToEnum,
+ const std::function<DestMask(DestEnum)>& destEnumToMask) {
+ using UnsignedDestMask = std::make_unsigned_t<IntegralTypeOf<DestMask>>;
+ using UnsignedSrcMask = std::make_unsigned_t<IntegralTypeOf<SrcMask>>;
+
+ UnsignedDestMask dest = static_cast<UnsignedDestMask>(0);
+ UnsignedSrcMask usrc = static_cast<UnsignedSrcMask>(src);
+
+ int srcBitIndex = 0;
+ while (usrc != 0) {
+ if (usrc & 1) {
+ SrcEnum srcEnum = srcIndexToEnum(srcBitIndex);
+ DestEnum destEnum = VALUE_OR_RETURN(enumConversion(srcEnum));
+ DestMask destMask = destEnumToMask(destEnum);
+ dest |= destMask;
+ }
+ ++srcBitIndex;
+ usrc >>= 1;
+ }
+ return static_cast<DestMask>(dest);
+}
+
+template<typename Mask, typename Enum>
+bool bitmaskIsSet(Mask mask, Enum index) {
+ return (mask & enumToMask_index<Mask, Enum>(index)) != 0;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+
+template<typename To, typename From>
+ConversionResult<To> convertIntegral(From from) {
+ // Special handling is required for signed vs. unsigned comparisons, since otherwise we may
+ // have the signed converted to unsigned and produce wrong results.
+ if (std::is_signed_v<From> && !std::is_signed_v<To>) {
+ if (from < 0 || from > std::numeric_limits<To>::max()) {
+ return unexpected(BAD_VALUE);
+ }
+ } else if (std::is_signed_v<To> && !std::is_signed_v<From>) {
+ if (from > std::numeric_limits<To>::max()) {
+ return unexpected(BAD_VALUE);
+ }
+ } else {
+ if (from < std::numeric_limits<To>::min() || from > std::numeric_limits<To>::max()) {
+ return unexpected(BAD_VALUE);
+ }
+ }
+ return static_cast<To>(from);
+}
+
+template<typename To, typename From>
+ConversionResult<To> convertReinterpret(From from) {
+ static_assert(sizeof(From) == sizeof(To));
+ return static_cast<To>(from);
+}
+
+enum class Direction {
+ INPUT, OUTPUT
+};
+
+ConversionResult<Direction> direction(media::AudioPortRole role, media::AudioPortType type) {
+ switch (type) {
+ case media::AudioPortType::DEVICE:
+ switch (role) {
+ case media::AudioPortRole::SOURCE:
+ return Direction::INPUT;
+ case media::AudioPortRole::SINK:
+ return Direction::OUTPUT;
+ default:
+ break;
+ }
+ break;
+ case media::AudioPortType::MIX:
+ switch (role) {
+ case media::AudioPortRole::SOURCE:
+ return Direction::OUTPUT;
+ case media::AudioPortRole::SINK:
+ return Direction::INPUT;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+ConversionResult<Direction> direction(audio_port_role_t role, audio_port_type_t type) {
+ switch (type) {
+ case AUDIO_PORT_TYPE_DEVICE:
+ switch (role) {
+ case AUDIO_PORT_ROLE_SOURCE:
+ return Direction::INPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return Direction::OUTPUT;
+ default:
+ break;
+ }
+ break;
+ case AUDIO_PORT_TYPE_MIX:
+ switch (role) {
+ case AUDIO_PORT_ROLE_SOURCE:
+ return Direction::OUTPUT;
+ case AUDIO_PORT_ROLE_SINK:
+ return Direction::INPUT;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ return unexpected(BAD_VALUE);
+}
+
+} // namespace
+
+////////////////////////////////////////////////////////////////////////////////////////////////////
+// Converters
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<int> aidl2legacy_AudioPortConfigType(media::AudioPortConfigType aidl) {
+ switch (aidl) {
+ case media::AudioPortConfigType::SAMPLE_RATE:
+ return AUDIO_PORT_CONFIG_SAMPLE_RATE;
+ case media::AudioPortConfigType::CHANNEL_MASK:
+ return AUDIO_PORT_CONFIG_CHANNEL_MASK;
+ case media::AudioPortConfigType::FORMAT:
+ return AUDIO_PORT_CONFIG_FORMAT;
+ case media::AudioPortConfigType::GAIN:
+ return AUDIO_PORT_CONFIG_GAIN;
+ case media::AudioPortConfigType::FLAGS:
+ return AUDIO_PORT_CONFIG_FLAGS;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_AudioPortConfigType(int legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_CONFIG_SAMPLE_RATE:
+ return media::AudioPortConfigType::SAMPLE_RATE;
+ case AUDIO_PORT_CONFIG_CHANNEL_MASK:
+ return media::AudioPortConfigType::CHANNEL_MASK;
+ case AUDIO_PORT_CONFIG_FORMAT:
+ return media::AudioPortConfigType::FORMAT;
+ case AUDIO_PORT_CONFIG_GAIN:
+ return media::AudioPortConfigType::GAIN;
+ case AUDIO_PORT_CONFIG_FLAGS:
+ return media::AudioPortConfigType::FLAGS;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl) {
+ return convertBitmask<unsigned int, int32_t, int, media::AudioPortConfigType>(
+ aidl, aidl2legacy_AudioPortConfigType,
+ // AudioPortConfigType enum is index-based.
+ index2enum_index<media::AudioPortConfigType>,
+ // AUDIO_PORT_CONFIG_* flags are mask-based.
+ enumToMask_bitmask<unsigned int, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy) {
+ return convertBitmask<int32_t, unsigned int, media::AudioPortConfigType, int>(
+ legacy, legacy2aidl_AudioPortConfigType,
+ // AUDIO_PORT_CONFIG_* flags are mask-based.
+ index2enum_bitmask<unsigned>,
+ // AudioPortConfigType enum is index-based.
+ enumToMask_index<int32_t, media::AudioPortConfigType>);
+}
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl) {
+ // TODO(ytai): should we convert bit-by-bit?
+ // One problem here is that the representation is opaque and differs depending on the
+ // context (input vs. output). It could be determined from type and role, as per useInChannelMask().
+ return convertReinterpret<audio_channel_mask_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy) {
+ // TODO(ytai): should we convert bit-by-bit?
+ // One problem here is that the representation is opaque and differs depending on the
+ // context (input vs. output). It could be determined from type and role, as per useInChannelMask().
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ media::AudioIoConfigEvent aidl) {
+ switch (aidl) {
+ case media::AudioIoConfigEvent::OUTPUT_REGISTERED:
+ return AUDIO_OUTPUT_REGISTERED;
+ case media::AudioIoConfigEvent::OUTPUT_OPENED:
+ return AUDIO_OUTPUT_OPENED;
+ case media::AudioIoConfigEvent::OUTPUT_CLOSED:
+ return AUDIO_OUTPUT_CLOSED;
+ case media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED:
+ return AUDIO_OUTPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::INPUT_REGISTERED:
+ return AUDIO_INPUT_REGISTERED;
+ case media::AudioIoConfigEvent::INPUT_OPENED:
+ return AUDIO_INPUT_OPENED;
+ case media::AudioIoConfigEvent::INPUT_CLOSED:
+ return AUDIO_INPUT_CLOSED;
+ case media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED:
+ return AUDIO_INPUT_CONFIG_CHANGED;
+ case media::AudioIoConfigEvent::CLIENT_STARTED:
+ return AUDIO_CLIENT_STARTED;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+ audio_io_config_event legacy) {
+ switch (legacy) {
+ case AUDIO_OUTPUT_REGISTERED:
+ return media::AudioIoConfigEvent::OUTPUT_REGISTERED;
+ case AUDIO_OUTPUT_OPENED:
+ return media::AudioIoConfigEvent::OUTPUT_OPENED;
+ case AUDIO_OUTPUT_CLOSED:
+ return media::AudioIoConfigEvent::OUTPUT_CLOSED;
+ case AUDIO_OUTPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::OUTPUT_CONFIG_CHANGED;
+ case AUDIO_INPUT_REGISTERED:
+ return media::AudioIoConfigEvent::INPUT_REGISTERED;
+ case AUDIO_INPUT_OPENED:
+ return media::AudioIoConfigEvent::INPUT_OPENED;
+ case AUDIO_INPUT_CLOSED:
+ return media::AudioIoConfigEvent::INPUT_CLOSED;
+ case AUDIO_INPUT_CONFIG_CHANGED:
+ return media::AudioIoConfigEvent::INPUT_CONFIG_CHANGED;
+ case AUDIO_CLIENT_STARTED:
+ return media::AudioIoConfigEvent::CLIENT_STARTED;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+ media::AudioPortRole aidl) {
+ switch (aidl) {
+ case media::AudioPortRole::NONE:
+ return AUDIO_PORT_ROLE_NONE;
+ case media::AudioPortRole::SOURCE:
+ return AUDIO_PORT_ROLE_SOURCE;
+ case media::AudioPortRole::SINK:
+ return AUDIO_PORT_ROLE_SINK;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+ audio_port_role_t legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_ROLE_NONE:
+ return media::AudioPortRole::NONE;
+ case AUDIO_PORT_ROLE_SOURCE:
+ return media::AudioPortRole::SOURCE;
+ case AUDIO_PORT_ROLE_SINK:
+ return media::AudioPortRole::SINK;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+ media::AudioPortType aidl) {
+ switch (aidl) {
+ case media::AudioPortType::NONE:
+ return AUDIO_PORT_TYPE_NONE;
+ case media::AudioPortType::DEVICE:
+ return AUDIO_PORT_TYPE_DEVICE;
+ case media::AudioPortType::MIX:
+ return AUDIO_PORT_TYPE_MIX;
+ case media::AudioPortType::SESSION:
+ return AUDIO_PORT_TYPE_SESSION;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+ audio_port_type_t legacy) {
+ switch (legacy) {
+ case AUDIO_PORT_TYPE_NONE:
+ return media::AudioPortType::NONE;
+ case AUDIO_PORT_TYPE_DEVICE:
+ return media::AudioPortType::DEVICE;
+ case AUDIO_PORT_TYPE_MIX:
+ return media::AudioPortType::MIX;
+ case AUDIO_PORT_TYPE_SESSION:
+ return media::AudioPortType::SESSION;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+ media::audio::common::AudioFormat aidl) {
+ // This relies on AudioFormat being kept in sync with audio_format_t.
+ static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+ return static_cast<audio_format_t>(aidl);
+}
+
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+ audio_format_t legacy) {
+ // This relies on AudioFormat being kept in sync with audio_format_t.
+ static_assert(sizeof(media::audio::common::AudioFormat) == sizeof(audio_format_t));
+ return static_cast<media::audio::common::AudioFormat>(legacy);
+}
+
+ConversionResult<int> aidl2legacy_AudioGainMode_int(media::AudioGainMode aidl) {
+ switch (aidl) {
+ case media::AudioGainMode::JOINT:
+ return AUDIO_GAIN_MODE_JOINT;
+ case media::AudioGainMode::CHANNELS:
+ return AUDIO_GAIN_MODE_CHANNELS;
+ case media::AudioGainMode::RAMP:
+ return AUDIO_GAIN_MODE_RAMP;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioGainMode> legacy2aidl_int_AudioGainMode(int legacy) {
+ switch (legacy) {
+ case AUDIO_GAIN_MODE_JOINT:
+ return media::AudioGainMode::JOINT;
+ case AUDIO_GAIN_MODE_CHANNELS:
+ return media::AudioGainMode::CHANNELS;
+ case AUDIO_GAIN_MODE_RAMP:
+ return media::AudioGainMode::RAMP;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t(int32_t aidl) {
+ return convertBitmask<audio_gain_mode_t, int32_t, int, media::AudioGainMode>(
+ aidl, aidl2legacy_AudioGainMode_int,
+ // AudioGainMode is index-based.
+ index2enum_index<media::AudioGainMode>,
+ // AUDIO_GAIN_MODE_* constants are mask-based.
+ enumToMask_bitmask<audio_gain_mode_t, int>);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t(audio_gain_mode_t legacy) {
+ return convertBitmask<int32_t, audio_gain_mode_t, media::AudioGainMode, int>(
+ legacy, legacy2aidl_int_AudioGainMode,
+ // AUDIO_GAIN_MODE_* constants are mask-based.
+ index2enum_bitmask<int>,
+ // AudioGainMode is index-based.
+ enumToMask_index<int32_t, media::AudioGainMode>);
+}
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl) {
+ // TODO(ytai): bitfield?
+ return convertReinterpret<audio_devices_t>(aidl);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy) {
+ // TODO(ytai): bitfield?
+ return convertReinterpret<int32_t>(legacy);
+}
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+ const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type) {
+ audio_gain_config legacy;
+ legacy.index = VALUE_OR_RETURN(convertIntegral<int>(aidl.index));
+ legacy.mode = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_gain_mode_t(aidl.mode));
+ legacy.channel_mask =
+ VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+ const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+ const bool isJoint = bitmaskIsSet(aidl.mode, media::AudioGainMode::JOINT);
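+ // In JOINT mode a single gain value applies to all channels; otherwise there is one value per
+ // channel, with the channel count derived from the mask according to the port direction.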
+ size_t numValues = isJoint ? 1
+ : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+ : audio_channel_count_from_out_mask(legacy.channel_mask);
+ if (aidl.values.size() != numValues || aidl.values.size() > std::size(legacy.values)) {
+ return unexpected(BAD_VALUE);
+ }
+ for (size_t i = 0; i < numValues; ++i) {
+ legacy.values[i] = VALUE_OR_RETURN(convertIntegral<int>(aidl.values[i]));
+ }
+ legacy.ramp_duration_ms = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.rampDurationMs));
+ return legacy;
+}
+
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+ const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type) {
+ media::AudioGainConfig aidl;
+ aidl.index = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.index));
+ aidl.mode = VALUE_OR_RETURN(legacy2aidl_audio_gain_mode_t_int32_t(legacy.mode));
+ aidl.channelMask =
+ VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+ const bool isInput = VALUE_OR_RETURN(direction(role, type)) == Direction::INPUT;
+ const bool isJoint = (legacy.mode & AUDIO_GAIN_MODE_JOINT) != 0;
+ size_t numValues = isJoint ? 1
+ : isInput ? audio_channel_count_from_in_mask(legacy.channel_mask)
+ : audio_channel_count_from_out_mask(legacy.channel_mask);
+ aidl.values.resize(numValues);
+ for (size_t i = 0; i < numValues; ++i) {
+ aidl.values[i] = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.values[i]));
+ }
+ aidl.rampDurationMs = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.ramp_duration_ms));
+ return aidl;
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+ media::AudioInputFlags aidl) {
+ switch (aidl) {
+ case media::AudioInputFlags::FAST:
+ return AUDIO_INPUT_FLAG_FAST;
+ case media::AudioInputFlags::HW_HOTWORD:
+ return AUDIO_INPUT_FLAG_HW_HOTWORD;
+ case media::AudioInputFlags::RAW:
+ return AUDIO_INPUT_FLAG_RAW;
+ case media::AudioInputFlags::SYNC:
+ return AUDIO_INPUT_FLAG_SYNC;
+ case media::AudioInputFlags::MMAP_NOIRQ:
+ return AUDIO_INPUT_FLAG_MMAP_NOIRQ;
+ case media::AudioInputFlags::VOIP_TX:
+ return AUDIO_INPUT_FLAG_VOIP_TX;
+ case media::AudioInputFlags::HW_AV_SYNC:
+ return AUDIO_INPUT_FLAG_HW_AV_SYNC;
+ case media::AudioInputFlags::DIRECT:
+ return AUDIO_INPUT_FLAG_DIRECT;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ audio_input_flags_t legacy) {
+ switch (legacy) {
+ case AUDIO_INPUT_FLAG_FAST:
+ return media::AudioInputFlags::FAST;
+ case AUDIO_INPUT_FLAG_HW_HOTWORD:
+ return media::AudioInputFlags::HW_HOTWORD;
+ case AUDIO_INPUT_FLAG_RAW:
+ return media::AudioInputFlags::RAW;
+ case AUDIO_INPUT_FLAG_SYNC:
+ return media::AudioInputFlags::SYNC;
+ case AUDIO_INPUT_FLAG_MMAP_NOIRQ:
+ return media::AudioInputFlags::MMAP_NOIRQ;
+ case AUDIO_INPUT_FLAG_VOIP_TX:
+ return media::AudioInputFlags::VOIP_TX;
+ case AUDIO_INPUT_FLAG_HW_AV_SYNC:
+ return media::AudioInputFlags::HW_AV_SYNC;
+ case AUDIO_INPUT_FLAG_DIRECT:
+ return media::AudioInputFlags::DIRECT;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+ media::AudioOutputFlags aidl) {
+ switch (aidl) {
+ case media::AudioOutputFlags::DIRECT:
+ return AUDIO_OUTPUT_FLAG_DIRECT;
+ case media::AudioOutputFlags::PRIMARY:
+ return AUDIO_OUTPUT_FLAG_PRIMARY;
+ case media::AudioOutputFlags::FAST:
+ return AUDIO_OUTPUT_FLAG_FAST;
+ case media::AudioOutputFlags::DEEP_BUFFER:
+ return AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ case media::AudioOutputFlags::COMPRESS_OFFLOAD:
+ return AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ case media::AudioOutputFlags::NON_BLOCKING:
+ return AUDIO_OUTPUT_FLAG_NON_BLOCKING;
+ case media::AudioOutputFlags::HW_AV_SYNC:
+ return AUDIO_OUTPUT_FLAG_HW_AV_SYNC;
+ case media::AudioOutputFlags::TTS:
+ return AUDIO_OUTPUT_FLAG_TTS;
+ case media::AudioOutputFlags::RAW:
+ return AUDIO_OUTPUT_FLAG_RAW;
+ case media::AudioOutputFlags::SYNC:
+ return AUDIO_OUTPUT_FLAG_SYNC;
+ case media::AudioOutputFlags::IEC958_NONAUDIO:
+ return AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO;
+ case media::AudioOutputFlags::DIRECT_PCM:
+ return AUDIO_OUTPUT_FLAG_DIRECT_PCM;
+ case media::AudioOutputFlags::MMAP_NOIRQ:
+ return AUDIO_OUTPUT_FLAG_MMAP_NOIRQ;
+ case media::AudioOutputFlags::VOIP_RX:
+ return AUDIO_OUTPUT_FLAG_VOIP_RX;
+ case media::AudioOutputFlags::INCALL_MUSIC:
+ return AUDIO_OUTPUT_FLAG_INCALL_MUSIC;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ audio_output_flags_t legacy) {
+ switch (legacy) {
+ case AUDIO_OUTPUT_FLAG_DIRECT:
+ return media::AudioOutputFlags::DIRECT;
+ case AUDIO_OUTPUT_FLAG_PRIMARY:
+ return media::AudioOutputFlags::PRIMARY;
+ case AUDIO_OUTPUT_FLAG_FAST:
+ return media::AudioOutputFlags::FAST;
+ case AUDIO_OUTPUT_FLAG_DEEP_BUFFER:
+ return media::AudioOutputFlags::DEEP_BUFFER;
+ case AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD:
+ return media::AudioOutputFlags::COMPRESS_OFFLOAD;
+ case AUDIO_OUTPUT_FLAG_NON_BLOCKING:
+ return media::AudioOutputFlags::NON_BLOCKING;
+ case AUDIO_OUTPUT_FLAG_HW_AV_SYNC:
+ return media::AudioOutputFlags::HW_AV_SYNC;
+ case AUDIO_OUTPUT_FLAG_TTS:
+ return media::AudioOutputFlags::TTS;
+ case AUDIO_OUTPUT_FLAG_RAW:
+ return media::AudioOutputFlags::RAW;
+ case AUDIO_OUTPUT_FLAG_SYNC:
+ return media::AudioOutputFlags::SYNC;
+ case AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO:
+ return media::AudioOutputFlags::IEC958_NONAUDIO;
+ case AUDIO_OUTPUT_FLAG_DIRECT_PCM:
+ return media::AudioOutputFlags::DIRECT_PCM;
+ case AUDIO_OUTPUT_FLAG_MMAP_NOIRQ:
+ return media::AudioOutputFlags::MMAP_NOIRQ;
+ case AUDIO_OUTPUT_FLAG_VOIP_RX:
+ return media::AudioOutputFlags::VOIP_RX;
+ case AUDIO_OUTPUT_FLAG_INCALL_MUSIC:
+ return media::AudioOutputFlags::INCALL_MUSIC;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_input_flags_t> aidl2legacy_audio_input_flags_mask(int32_t aidl) {
+ using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+ LegacyMask converted = VALUE_OR_RETURN(
+ (convertBitmask<LegacyMask, int32_t, audio_input_flags_t, media::AudioInputFlags>(
+ aidl, aidl2legacy_AudioInputFlags_audio_input_flags_t,
+ index2enum_index<media::AudioInputFlags>,
+ enumToMask_bitmask<LegacyMask, audio_input_flags_t>)));
+ return static_cast<audio_input_flags_t>(converted);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_mask(audio_input_flags_t legacy) {
+ using LegacyMask = std::underlying_type_t<audio_input_flags_t>;
+
+ LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+ return convertBitmask<int32_t, LegacyMask, media::AudioInputFlags, audio_input_flags_t>(
+ legacyMask, legacy2aidl_audio_input_flags_t_AudioInputFlags,
+ index2enum_bitmask<audio_input_flags_t>,
+ enumToMask_index<int32_t, media::AudioInputFlags>);
+}
+
+ConversionResult<audio_output_flags_t> aidl2legacy_audio_output_flags_mask(int32_t aidl) {
+ using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+ LegacyMask converted = VALUE_OR_RETURN(
+ (convertBitmask<LegacyMask, int32_t, audio_output_flags_t, media::AudioOutputFlags>(
+ aidl, aidl2legacy_AudioOutputFlags_audio_output_flags_t,
+ index2enum_index<media::AudioOutputFlags>,
+ enumToMask_bitmask<LegacyMask, audio_output_flags_t>)));
+ return convertReinterpret<audio_output_flags_t>(converted);
+}
+
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_mask(audio_output_flags_t legacy) {
+ using LegacyMask = std::underlying_type_t<audio_output_flags_t>;
+
+ LegacyMask legacyMask = static_cast<LegacyMask>(legacy);
+ return convertBitmask<int32_t, LegacyMask, media::AudioOutputFlags, audio_output_flags_t>(
+ legacyMask, legacy2aidl_audio_output_flags_t_AudioOutputFlags,
+ index2enum_bitmask<audio_output_flags_t>,
+ enumToMask_index<int32_t, media::AudioOutputFlags>);
+}
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+ const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type) {
+ audio_io_flags legacy;
+ // Our way of representing a union in AIDL is to have multiple vectors and require that at most
+ // one of them has size 1 and the rest are empty.
+ size_t totalSize = aidl.input.size() + aidl.output.size();
+ if (totalSize > 1) {
+ return unexpected(BAD_VALUE);
+ }
+
+ Direction dir = VALUE_OR_RETURN(direction(role, type));
+ switch (dir) {
+ case Direction::INPUT:
+ if (aidl.input.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.input = VALUE_OR_RETURN(aidl2legacy_audio_input_flags_mask(aidl.input[0]));
+ break;
+
+ case Direction::OUTPUT:
+ if (aidl.output.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.output = VALUE_OR_RETURN(aidl2legacy_audio_output_flags_mask(aidl.output[0]));
+ break;
+ }
+
+ return legacy;
+}
+
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type) {
+ media::AudioIoFlags aidl;
+
+ Direction dir = VALUE_OR_RETURN(direction(role, type));
+ switch (dir) {
+ case Direction::INPUT:
+ aidl.input.push_back(VALUE_OR_RETURN(legacy2aidl_audio_input_flags_mask(legacy.input)));
+ break;
+ case Direction::OUTPUT:
+ aidl.output.push_back(
+ VALUE_OR_RETURN(legacy2aidl_audio_output_flags_mask(legacy.output)));
+ break;
+ }
+ return aidl;
+}
+
+ConversionResult<audio_port_config_device_ext> aidl2legacy_AudioPortConfigDeviceExt(
+ const media::AudioPortConfigDeviceExt& aidl) {
+ audio_port_config_device_ext legacy;
+ legacy.hw_module = VALUE_OR_RETURN(convertReinterpret<audio_module_handle_t>(aidl.hwModule));
+ legacy.type = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_devices_t(aidl.type));
+ if (aidl.address.size() > AUDIO_DEVICE_MAX_ADDRESS_LEN - 1) {
+ return unexpected(BAD_VALUE);
+ }
+ std::strcpy(legacy.address, aidl.address.c_str());
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfigDeviceExt> legacy2aidl_AudioPortConfigDeviceExt(
+ const audio_port_config_device_ext& legacy) {
+ media::AudioPortConfigDeviceExt aidl;
+ aidl.hwModule = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.hw_module));
+ aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_devices_t_int32_t(legacy.type));
+
+ if (strnlen(legacy.address, AUDIO_DEVICE_MAX_ADDRESS_LEN) == AUDIO_DEVICE_MAX_ADDRESS_LEN) {
+ // No null-terminator.
+ return unexpected(BAD_VALUE);
+ }
+ aidl.address = legacy.address;
+ return aidl;
+}
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+ media::AudioStreamType aidl) {
+ switch (aidl) {
+ case media::AudioStreamType::DEFAULT:
+ return AUDIO_STREAM_DEFAULT;
+ case media::AudioStreamType::VOICE_CALL:
+ return AUDIO_STREAM_VOICE_CALL;
+ case media::AudioStreamType::SYSTEM:
+ return AUDIO_STREAM_SYSTEM;
+ case media::AudioStreamType::RING:
+ return AUDIO_STREAM_RING;
+ case media::AudioStreamType::MUSIC:
+ return AUDIO_STREAM_MUSIC;
+ case media::AudioStreamType::ALARM:
+ return AUDIO_STREAM_ALARM;
+ case media::AudioStreamType::NOTIFICATION:
+ return AUDIO_STREAM_NOTIFICATION;
+ case media::AudioStreamType::BLUETOOTH_SCO:
+ return AUDIO_STREAM_BLUETOOTH_SCO;
+ case media::AudioStreamType::ENFORCED_AUDIBLE:
+ return AUDIO_STREAM_ENFORCED_AUDIBLE;
+ case media::AudioStreamType::DTMF:
+ return AUDIO_STREAM_DTMF;
+ case media::AudioStreamType::TTS:
+ return AUDIO_STREAM_TTS;
+ case media::AudioStreamType::ACCESSIBILITY:
+ return AUDIO_STREAM_ACCESSIBILITY;
+ case media::AudioStreamType::ASSISTANT:
+ return AUDIO_STREAM_ASSISTANT;
+ case media::AudioStreamType::REROUTING:
+ return AUDIO_STREAM_REROUTING;
+ case media::AudioStreamType::PATCH:
+ return AUDIO_STREAM_PATCH;
+ case media::AudioStreamType::CALL_ASSISTANT:
+ return AUDIO_STREAM_CALL_ASSISTANT;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+ audio_stream_type_t legacy) {
+ switch (legacy) {
+ case AUDIO_STREAM_DEFAULT:
+ return media::AudioStreamType::DEFAULT;
+ case AUDIO_STREAM_VOICE_CALL:
+ return media::AudioStreamType::VOICE_CALL;
+ case AUDIO_STREAM_SYSTEM:
+ return media::AudioStreamType::SYSTEM;
+ case AUDIO_STREAM_RING:
+ return media::AudioStreamType::RING;
+ case AUDIO_STREAM_MUSIC:
+ return media::AudioStreamType::MUSIC;
+ case AUDIO_STREAM_ALARM:
+ return media::AudioStreamType::ALARM;
+ case AUDIO_STREAM_NOTIFICATION:
+ return media::AudioStreamType::NOTIFICATION;
+ case AUDIO_STREAM_BLUETOOTH_SCO:
+ return media::AudioStreamType::BLUETOOTH_SCO;
+ case AUDIO_STREAM_ENFORCED_AUDIBLE:
+ return media::AudioStreamType::ENFORCED_AUDIBLE;
+ case AUDIO_STREAM_DTMF:
+ return media::AudioStreamType::DTMF;
+ case AUDIO_STREAM_TTS:
+ return media::AudioStreamType::TTS;
+ case AUDIO_STREAM_ACCESSIBILITY:
+ return media::AudioStreamType::ACCESSIBILITY;
+ case AUDIO_STREAM_ASSISTANT:
+ return media::AudioStreamType::ASSISTANT;
+ case AUDIO_STREAM_REROUTING:
+ return media::AudioStreamType::REROUTING;
+ case AUDIO_STREAM_PATCH:
+ return media::AudioStreamType::PATCH;
+ case AUDIO_STREAM_CALL_ASSISTANT:
+ return media::AudioStreamType::CALL_ASSISTANT;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+ media::AudioSourceType aidl) {
+ switch (aidl) {
+ case media::AudioSourceType::DEFAULT:
+ return AUDIO_SOURCE_DEFAULT;
+ case media::AudioSourceType::MIC:
+ return AUDIO_SOURCE_MIC;
+ case media::AudioSourceType::VOICE_UPLINK:
+ return AUDIO_SOURCE_VOICE_UPLINK;
+ case media::AudioSourceType::VOICE_DOWNLINK:
+ return AUDIO_SOURCE_VOICE_DOWNLINK;
+ case media::AudioSourceType::VOICE_CALL:
+ return AUDIO_SOURCE_VOICE_CALL;
+ case media::AudioSourceType::CAMCORDER:
+ return AUDIO_SOURCE_CAMCORDER;
+ case media::AudioSourceType::VOICE_RECOGNITION:
+ return AUDIO_SOURCE_VOICE_RECOGNITION;
+ case media::AudioSourceType::VOICE_COMMUNICATION:
+ return AUDIO_SOURCE_VOICE_COMMUNICATION;
+ case media::AudioSourceType::REMOTE_SUBMIX:
+ return AUDIO_SOURCE_REMOTE_SUBMIX;
+ case media::AudioSourceType::UNPROCESSED:
+ return AUDIO_SOURCE_UNPROCESSED;
+ case media::AudioSourceType::VOICE_PERFORMANCE:
+ return AUDIO_SOURCE_VOICE_PERFORMANCE;
+ case media::AudioSourceType::ECHO_REFERENCE:
+ return AUDIO_SOURCE_ECHO_REFERENCE;
+ case media::AudioSourceType::FM_TUNER:
+ return AUDIO_SOURCE_FM_TUNER;
+ case media::AudioSourceType::HOTWORD:
+ return AUDIO_SOURCE_HOTWORD;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+ audio_source_t legacy) {
+ switch (legacy) {
+ case AUDIO_SOURCE_DEFAULT:
+ return media::AudioSourceType::DEFAULT;
+ case AUDIO_SOURCE_MIC:
+ return media::AudioSourceType::MIC;
+ case AUDIO_SOURCE_VOICE_UPLINK:
+ return media::AudioSourceType::VOICE_UPLINK;
+ case AUDIO_SOURCE_VOICE_DOWNLINK:
+ return media::AudioSourceType::VOICE_DOWNLINK;
+ case AUDIO_SOURCE_VOICE_CALL:
+ return media::AudioSourceType::VOICE_CALL;
+ case AUDIO_SOURCE_CAMCORDER:
+ return media::AudioSourceType::CAMCORDER;
+ case AUDIO_SOURCE_VOICE_RECOGNITION:
+ return media::AudioSourceType::VOICE_RECOGNITION;
+ case AUDIO_SOURCE_VOICE_COMMUNICATION:
+ return media::AudioSourceType::VOICE_COMMUNICATION;
+ case AUDIO_SOURCE_REMOTE_SUBMIX:
+ return media::AudioSourceType::REMOTE_SUBMIX;
+ case AUDIO_SOURCE_UNPROCESSED:
+ return media::AudioSourceType::UNPROCESSED;
+ case AUDIO_SOURCE_VOICE_PERFORMANCE:
+ return media::AudioSourceType::VOICE_PERFORMANCE;
+ case AUDIO_SOURCE_ECHO_REFERENCE:
+ return media::AudioSourceType::ECHO_REFERENCE;
+ case AUDIO_SOURCE_FM_TUNER:
+ return media::AudioSourceType::FM_TUNER;
+ case AUDIO_SOURCE_HOTWORD:
+ return media::AudioSourceType::HOTWORD;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<audio_session_t> aidl2legacy_AudioSessionType_audio_session_t(
+ media::AudioSessionType aidl) {
+ switch (aidl) {
+ case media::AudioSessionType::DEVICE:
+ return AUDIO_SESSION_DEVICE;
+ case media::AudioSessionType::OUTPUT_STAGE:
+ return AUDIO_SESSION_OUTPUT_STAGE;
+ case media::AudioSessionType::OUTPUT_MIX:
+ return AUDIO_SESSION_OUTPUT_MIX;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+ConversionResult<media::AudioSessionType> legacy2aidl_audio_session_t_AudioSessionType(
+ audio_session_t legacy) {
+ switch (legacy) {
+ case AUDIO_SESSION_DEVICE:
+ return media::AudioSessionType::DEVICE;
+ case AUDIO_SESSION_OUTPUT_STAGE:
+ return media::AudioSessionType::OUTPUT_STAGE;
+ case AUDIO_SESSION_OUTPUT_MIX:
+ return media::AudioSessionType::OUTPUT_MIX;
+ default:
+ return unexpected(BAD_VALUE);
+ }
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_mix_ext_usecase = decltype(audio_port_config_mix_ext::usecase);
+
+ConversionResult<audio_port_config_mix_ext_usecase> aidl2legacy_AudioPortConfigMixExtUseCase(
+ const media::AudioPortConfigMixExtUseCase& aidl, media::AudioPortRole role) {
+ audio_port_config_mix_ext_usecase legacy;
+
+ // Our way of representing a union in AIDL is to have multiple vectors and require that at most
+ // one of them has size 1 and the rest are empty.
+ size_t totalSize = aidl.stream.size() + aidl.source.size();
+ if (totalSize > 1) {
+ return unexpected(BAD_VALUE);
+ }
+
+ switch (role) {
+ case media::AudioPortRole::NONE:
+ if (totalSize != 0) {
+ return unexpected(BAD_VALUE);
+ }
+ break;
+
+ case media::AudioPortRole::SOURCE:
+ // This is not a bug. A SOURCE role corresponds to the stream field.
+ if (aidl.stream.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.stream = VALUE_OR_RETURN(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.stream[0]));
+ break;
+
+ case media::AudioPortRole::SINK:
+ // This is not a bug. A SINK role corresponds to the source field.
+ if (aidl.source.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.source =
+ VALUE_OR_RETURN(aidl2legacy_AudioSourceType_audio_source_t(aidl.source[0]));
+ break;
+
+ default:
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ }
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfigMixExtUseCase> legacy2aidl_AudioPortConfigMixExtUseCase(
+ const audio_port_config_mix_ext_usecase& legacy, audio_port_role_t role) {
+ media::AudioPortConfigMixExtUseCase aidl;
+
+ switch (role) {
+ case AUDIO_PORT_ROLE_NONE:
+ break;
+ case AUDIO_PORT_ROLE_SOURCE:
+ // This is not a bug. A SOURCE role corresponds to the stream field.
+ aidl.stream.push_back(VALUE_OR_RETURN(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(
+ legacy.stream)));
+ break;
+ case AUDIO_PORT_ROLE_SINK:
+ // This is not a bug. A SINK role corresponds to the source field.
+ aidl.source.push_back(
+ VALUE_OR_RETURN(legacy2aidl_audio_source_t_AudioSourceType(legacy.source)));
+ break;
+ default:
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ }
+ return aidl;
+}
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+ const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role) {
+ audio_port_config_mix_ext legacy;
+ legacy.hw_module = VALUE_OR_RETURN(convertReinterpret<audio_module_handle_t>(aidl.hwModule));
+ legacy.handle = VALUE_OR_RETURN(convertReinterpret<audio_io_handle_t>(aidl.handle));
+ legacy.usecase = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExtUseCase(aidl.usecase, role));
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+ const audio_port_config_mix_ext& legacy, audio_port_role_t role) {
+ media::AudioPortConfigMixExt aidl;
+ aidl.hwModule = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.hw_module));
+ aidl.handle = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.handle));
+ aidl.usecase = VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExtUseCase(legacy.usecase, role));
+ return aidl;
+}
+
+ConversionResult<audio_port_config_session_ext> aidl2legacy_AudioPortConfigSessionExt(
+ const media::AudioPortConfigSessionExt& aidl) {
+ audio_port_config_session_ext legacy;
+ legacy.session = VALUE_OR_RETURN(aidl2legacy_AudioSessionType_audio_session_t(aidl.session));
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfigSessionExt> legacy2aidl_AudioPortConfigSessionExt(
+ const audio_port_config_session_ext& legacy) {
+ media::AudioPortConfigSessionExt aidl;
+ aidl.session = VALUE_OR_RETURN(legacy2aidl_audio_session_t_AudioSessionType(legacy.session));
+ return aidl;
+}
+
+// This type is unnamed in the original definition, thus we name it here.
+using audio_port_config_ext = decltype(audio_port_config::ext);
+
+ConversionResult<audio_port_config_ext> aidl2legacy_AudioPortConfigExt(
+ const media::AudioPortConfigExt& aidl, media::AudioPortType type,
+ media::AudioPortRole role) {
+ audio_port_config_ext legacy;
+ // Our way of representing a union in AIDL is to have multiple vectors and require that at most
+ // one of them has size 1 and the rest are empty.
+ size_t totalSize = aidl.device.size() + aidl.mix.size() + aidl.session.size();
+ if (totalSize > 1) {
+ return unexpected(BAD_VALUE);
+ }
+ switch (type) {
+ case media::AudioPortType::NONE:
+ if (totalSize != 0) {
+ return unexpected(BAD_VALUE);
+ }
+ break;
+ case media::AudioPortType::DEVICE:
+ if (aidl.device.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.device = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigDeviceExt(aidl.device[0]));
+ break;
+ case media::AudioPortType::MIX:
+ if (aidl.mix.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.mix = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigMixExt(aidl.mix[0], role));
+ break;
+ case media::AudioPortType::SESSION:
+ if (aidl.session.empty()) {
+ return unexpected(BAD_VALUE);
+ }
+ legacy.session =
+ VALUE_OR_RETURN(aidl2legacy_AudioPortConfigSessionExt(aidl.session[0]));
+ break;
+ default:
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ }
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfigExt> legacy2aidl_AudioPortConfigExt(
+ const audio_port_config_ext& legacy, audio_port_type_t type, audio_port_role_t role) {
+ media::AudioPortConfigExt aidl;
+
+ switch (type) {
+ case AUDIO_PORT_TYPE_NONE:
+ break;
+ case AUDIO_PORT_TYPE_DEVICE:
+ aidl.device.push_back(
+ VALUE_OR_RETURN(legacy2aidl_AudioPortConfigDeviceExt(legacy.device)));
+ break;
+ case AUDIO_PORT_TYPE_MIX:
+ aidl.mix.push_back(
+ VALUE_OR_RETURN(legacy2aidl_AudioPortConfigMixExt(legacy.mix, role)));
+ break;
+ case AUDIO_PORT_TYPE_SESSION:
+ aidl.session.push_back(
+ VALUE_OR_RETURN(legacy2aidl_AudioPortConfigSessionExt(legacy.session)));
+ break;
+ default:
+ LOG_ALWAYS_FATAL("Shouldn't get here");
+ }
+ return aidl;
+}
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+ const media::AudioPortConfig& aidl) {
+ audio_port_config legacy;
+ legacy.id = VALUE_OR_RETURN(convertReinterpret<audio_port_handle_t>(aidl.id));
+ legacy.role = VALUE_OR_RETURN(aidl2legacy_AudioPortRole_audio_port_role_t(aidl.role));
+ legacy.type = VALUE_OR_RETURN(aidl2legacy_AudioPortType_audio_port_type_t(aidl.type));
+ legacy.config_mask = VALUE_OR_RETURN(aidl2legacy_int32_t_config_mask(aidl.configMask));
+ if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::SAMPLE_RATE)) {
+ legacy.sample_rate = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sampleRate));
+ }
+ if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::CHANNEL_MASK)) {
+ legacy.channel_mask =
+ VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+ }
+ if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FORMAT)) {
+ legacy.format = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+ }
+ if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::GAIN)) {
+ legacy.gain = VALUE_OR_RETURN(
+ aidl2legacy_AudioGainConfig_audio_gain_config(aidl.gain, aidl.role, aidl.type));
+ }
+ if (bitmaskIsSet(aidl.configMask, media::AudioPortConfigType::FLAGS)) {
+ legacy.flags = VALUE_OR_RETURN(
+ aidl2legacy_AudioIoFlags_audio_io_flags(aidl.flags, aidl.role, aidl.type));
+ }
+ legacy.ext = VALUE_OR_RETURN(aidl2legacy_AudioPortConfigExt(aidl.ext, aidl.type, aidl.role));
+ return legacy;
+}
+
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy) {
+ media::AudioPortConfig aidl;
+ aidl.id = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.id));
+ aidl.role = VALUE_OR_RETURN(legacy2aidl_audio_port_role_t_AudioPortRole(legacy.role));
+ aidl.type = VALUE_OR_RETURN(legacy2aidl_audio_port_type_t_AudioPortType(legacy.type));
+ aidl.configMask = VALUE_OR_RETURN(legacy2aidl_config_mask_int32_t(legacy.config_mask));
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) {
+ aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy.sample_rate));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) {
+ aidl.channelMask =
+ VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy.channel_mask));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_FORMAT) {
+ aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy.format));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_GAIN) {
+ aidl.gain = VALUE_OR_RETURN(legacy2aidl_audio_gain_config_AudioGainConfig(
+ legacy.gain, legacy.role, legacy.type));
+ }
+ if (legacy.config_mask & AUDIO_PORT_CONFIG_FLAGS) {
+ aidl.flags = VALUE_OR_RETURN(
+ legacy2aidl_audio_io_flags_AudioIoFlags(legacy.flags, legacy.role, legacy.type));
+ }
+ aidl.ext =
+ VALUE_OR_RETURN(legacy2aidl_AudioPortConfigExt(legacy.ext, legacy.type, legacy.role));
+ return aidl;
+}
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+ const media::AudioPatch& aidl) {
+ struct audio_patch legacy;
+ legacy.id = VALUE_OR_RETURN(convertReinterpret<audio_patch_handle_t>(aidl.id));
+ legacy.num_sinks = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sinks.size()));
+ if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+ return unexpected(BAD_VALUE);
+ }
+ for (size_t i = 0; i < legacy.num_sinks; ++i) {
+ legacy.sinks[i] =
+ VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sinks[i]));
+ }
+ legacy.num_sources = VALUE_OR_RETURN(convertIntegral<unsigned int>(aidl.sources.size()));
+ if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+ return unexpected(BAD_VALUE);
+ }
+ for (size_t i = 0; i < legacy.num_sources; ++i) {
+ legacy.sources[i] =
+ VALUE_OR_RETURN(aidl2legacy_AudioPortConfig_audio_port_config(aidl.sources[i]));
+ }
+ return legacy;
+}
+
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ const struct audio_patch& legacy) {
+ media::AudioPatch aidl;
+ aidl.id = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy.id));
+
+ if (legacy.num_sinks > AUDIO_PATCH_PORTS_MAX) {
+ return unexpected(BAD_VALUE);
+ }
+ for (unsigned int i = 0; i < legacy.num_sinks; ++i) {
+ aidl.sinks.push_back(
+ VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sinks[i])));
+ }
+ if (legacy.num_sources > AUDIO_PATCH_PORTS_MAX) {
+ return unexpected(BAD_VALUE);
+ }
+ for (unsigned int i = 0; i < legacy.num_sources; ++i) {
+ aidl.sources.push_back(
+ VALUE_OR_RETURN(legacy2aidl_audio_port_config_AudioPortConfig(legacy.sources[i])));
+ }
+ return aidl;
+}
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+ const media::AudioIoDescriptor& aidl) {
+ sp<AudioIoDescriptor> legacy(new AudioIoDescriptor());
+ legacy->mIoHandle = VALUE_OR_RETURN(convertReinterpret<audio_io_handle_t>(aidl.ioHandle));
+ legacy->mPatch = VALUE_OR_RETURN(aidl2legacy_AudioPatch_audio_patch(aidl.patch));
+ legacy->mSamplingRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.samplingRate));
+ legacy->mFormat = VALUE_OR_RETURN(aidl2legacy_AudioFormat_audio_format_t(aidl.format));
+ legacy->mChannelMask =
+ VALUE_OR_RETURN(aidl2legacy_int32_t_audio_channel_mask_t(aidl.channelMask));
+ legacy->mFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCount));
+ legacy->mFrameCountHAL = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.frameCountHAL));
+ legacy->mLatency = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.latency));
+ legacy->mPortId = VALUE_OR_RETURN(convertReinterpret<audio_port_handle_t>(aidl.portId));
+ return legacy;
+}
+
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+ const sp<AudioIoDescriptor>& legacy) {
+ media::AudioIoDescriptor aidl;
+ aidl.ioHandle = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy->mIoHandle));
+ aidl.patch = VALUE_OR_RETURN(legacy2aidl_audio_patch_AudioPatch(legacy->mPatch));
+ aidl.samplingRate = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mSamplingRate));
+ aidl.format = VALUE_OR_RETURN(legacy2aidl_audio_format_t_AudioFormat(legacy->mFormat));
+ aidl.channelMask =
+ VALUE_OR_RETURN(legacy2aidl_audio_channel_mask_t_int32_t(legacy->mChannelMask));
+ aidl.frameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCount));
+ aidl.frameCountHAL = VALUE_OR_RETURN(convertIntegral<int64_t>(legacy->mFrameCountHAL));
+ aidl.latency = VALUE_OR_RETURN(convertIntegral<int32_t>(legacy->mLatency));
+ aidl.portId = VALUE_OR_RETURN(convertReinterpret<int32_t>(legacy->mPortId));
+ return aidl;
+}
+
+} // namespace android
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index d7e9461..fef0ca9 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -2,6 +2,7 @@
name: "libaudioclient_headers",
vendor_available: true,
min_sdk_version: "29",
+ host_supported: true,
header_libs: [
"libaudiofoundation_headers",
@@ -12,7 +13,12 @@
export_header_lib_headers: [
"libaudiofoundation_headers",
],
- host_supported: true,
+ static_libs: [
+ "audioflinger-aidl-unstable-cpp",
+ ],
+ export_static_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
+ ],
target: {
darwin: {
enabled: false,
@@ -29,6 +35,7 @@
"AudioVolumeGroup.cpp",
],
shared_libs: [
+ "audioflinger-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
"libaudiofoundation",
"libaudioutils",
@@ -44,6 +51,7 @@
include_dirs: ["system/media/audio_utils/include"],
export_include_dirs: ["include"],
export_shared_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
],
}
@@ -73,7 +81,6 @@
"AudioTrack.cpp",
"AudioTrackShared.cpp",
"IAudioFlinger.cpp",
- "IAudioFlingerClient.cpp",
"IAudioPolicyService.cpp",
"IAudioPolicyServiceClient.cpp",
"IAudioTrack.cpp",
@@ -83,7 +90,9 @@
"TrackPlayerBase.cpp",
],
shared_libs: [
+ "audioflinger-aidl-unstable-cpp",
"capture_state_listener-aidl-cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudioutils",
"libaudiopolicy",
@@ -101,7 +110,10 @@
"libutils",
"libvibrator",
],
- export_shared_lib_headers: ["libbinder"],
+ export_shared_lib_headers: [
+ "audioflinger-aidl-unstable-cpp",
+ "libbinder",
+ ],
include_dirs: [
"frameworks/av/media/libnbaio/include_mono/",
@@ -140,6 +152,32 @@
},
}
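+
+// Conversion routines between the legacy audio_*_t types and their AIDL counterparts,
+// implemented in AidlConversion.cpp.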
+cc_library_shared {
+ name: "libaudioclient_aidl_conversion",
+ srcs: ["AidlConversion.cpp"],
+ local_include_dirs: ["include"],
+ shared_libs: [
+ "audioclient-types-aidl-unstable-cpp",
+ "libbase",
+ "liblog",
+ "libutils",
+ ],
+ export_shared_lib_headers: [
+ "audioclient-types-aidl-unstable-cpp",
+ ],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ ],
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
+
// AIDL interface between libaudioclient and framework.jar
filegroup {
name: "libaudioclient_aidl",
@@ -189,3 +227,61 @@
"shared-file-region-aidl",
],
}
+
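+// Audio port / patch / IO-configuration parcelables and enums shared by the audio client AIDL
+// interfaces and handled by libaudioclient_aidl_conversion.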
+aidl_interface {
+ name: "audioclient-types-aidl",
+ unstable: true,
+ host_supported: true,
+ vendor_available: true,
+ double_loadable: true,
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/android/media/AudioGainConfig.aidl",
+ "aidl/android/media/AudioGainMode.aidl",
+ "aidl/android/media/AudioInputFlags.aidl",
+ "aidl/android/media/AudioIoConfigEvent.aidl",
+ "aidl/android/media/AudioIoDescriptor.aidl",
+ "aidl/android/media/AudioIoFlags.aidl",
+ "aidl/android/media/AudioOutputFlags.aidl",
+ "aidl/android/media/AudioPatch.aidl",
+ "aidl/android/media/AudioPortConfig.aidl",
+ "aidl/android/media/AudioPortConfigType.aidl",
+ "aidl/android/media/AudioPortConfigDeviceExt.aidl",
+ "aidl/android/media/AudioPortConfigExt.aidl",
+ "aidl/android/media/AudioPortConfigMixExt.aidl",
+ "aidl/android/media/AudioPortConfigMixExtUseCase.aidl",
+ "aidl/android/media/AudioPortConfigSessionExt.aidl",
+ "aidl/android/media/AudioPortRole.aidl",
+ "aidl/android/media/AudioPortType.aidl",
+ "aidl/android/media/AudioSessionType.aidl",
+ "aidl/android/media/AudioSourceType.aidl",
+ "aidl/android/media/AudioStreamType.aidl",
+ ],
+ imports: [
+ "audio_common-aidl",
+ ],
+}
+
+aidl_interface {
+ name: "audioflinger-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ host_supported: true,
+ vendor_available: true,
+ srcs: [
+ "aidl/android/media/IAudioFlingerClient.aidl",
+ ],
+ imports: [
+ "audioclient-types-aidl",
+ ],
+ double_loadable: true,
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ },
+ },
+}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index edb0889..0507879 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -23,6 +23,7 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <binder/IPCThreadState.h>
+#include <media/AidlConversion.h>
#include <media/AudioResamplerPublic.h>
#include <media/AudioSystem.h>
#include <media/IAudioFlinger.h>
@@ -32,10 +33,17 @@
#include <system/audio.h>
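+
+// Turns a failed ConversionResult into a binder Status error return from the enclosing function;
+// on success, evaluates to the converted value.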
+#define VALUE_OR_RETURN(x) \
+ ({ auto _tmp = (x); \
+ if (!_tmp.ok()) return Status::fromStatusT(_tmp.error()); \
+ _tmp.value(); })
+
// ----------------------------------------------------------------------------
namespace android {
+using binder::Status;
+
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
Mutex AudioSystem::gLockErrorCallbacks;
@@ -521,11 +529,17 @@
ALOGW("AudioFlinger server died!");
}
-void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc) {
+Status AudioSystem::AudioFlingerClient::ioConfigChanged(
+ media::AudioIoConfigEvent _event,
+ const media::AudioIoDescriptor& _ioDesc) {
+ audio_io_config_event event = VALUE_OR_RETURN(
+ aidl2legacy_AudioIoConfigEvent_audio_io_config_event(_event));
+ sp<AudioIoDescriptor> ioDesc(
+ VALUE_OR_RETURN(aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(_ioDesc)));
+
ALOGV("ioConfigChanged() event %d", event);
- if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
+ if (ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return Status::ok();
audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
std::vector<sp<AudioDeviceCallback>> callbacksToCall;
@@ -640,6 +654,8 @@
// If callbacksToCall is not empty, it implies ioDesc->mIoHandle and deviceId are valid
cb->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
}
+
+ return Status::ok();
}
status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 7c304a1..d86182e 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -373,7 +373,7 @@
return reply.readString8();
}
- virtual void registerClient(const sp<IAudioFlingerClient>& client)
+ virtual void registerClient(const sp<media::IAudioFlingerClient>& client)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
@@ -1213,7 +1213,7 @@
case REGISTER_CLIENT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(
+ sp<media::IAudioFlingerClient> client = interface_cast<media::IAudioFlingerClient>(
data.readStrongBinder());
registerClient(client);
return NO_ERROR;
diff --git a/media/libaudioclient/IAudioFlingerClient.cpp b/media/libaudioclient/IAudioFlingerClient.cpp
deleted file mode 100644
index 47eb7dc..0000000
--- a/media/libaudioclient/IAudioFlingerClient.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "IAudioFlingerClient"
-#include <utils/Log.h>
-
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-
-#include <media/IAudioFlingerClient.h>
-#include <media/AudioSystem.h>
-
-namespace android {
-
-enum {
- IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION
-};
-
-class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient>
-{
-public:
- explicit BpAudioFlingerClient(const sp<IBinder>& impl)
- : BpInterface<IAudioFlingerClient>(impl)
- {
- }
-
- void ioConfigChanged(audio_io_config_event event, const sp<AudioIoDescriptor>& ioDesc)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());
- data.writeInt32(event);
- data.writeInt32((int32_t)ioDesc->mIoHandle);
- data.write(&ioDesc->mPatch, sizeof(struct audio_patch));
- data.writeInt32(ioDesc->mSamplingRate);
- data.writeInt32(ioDesc->mFormat);
- data.writeInt32(ioDesc->mChannelMask);
- data.writeInt64(ioDesc->mFrameCount);
- data.writeInt64(ioDesc->mFrameCountHAL);
- data.writeInt32(ioDesc->mLatency);
- data.writeInt32(ioDesc->mPortId);
- remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-};
-
-IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient");
-
-// ----------------------------------------------------------------------
-
-status_t BnAudioFlingerClient::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch (code) {
- case IO_CONFIG_CHANGED: {
- CHECK_INTERFACE(IAudioFlingerClient, data, reply);
- audio_io_config_event event = (audio_io_config_event)data.readInt32();
- sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor();
- ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32();
- data.read(&ioDesc->mPatch, sizeof(struct audio_patch));
- ioDesc->mSamplingRate = data.readInt32();
- ioDesc->mFormat = (audio_format_t) data.readInt32();
- ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32();
- ioDesc->mFrameCount = data.readInt64();
- ioDesc->mFrameCountHAL = data.readInt64();
- ioDesc->mLatency = data.readInt32();
- ioDesc->mPortId = data.readInt32();
- ioConfigChanged(event, ioDesc);
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-} // namespace android
diff --git a/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
new file mode 100644
index 0000000..b93c2dc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainConfig.aidl
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioGainConfig {
+ /** Index of the corresponding audio_gain in the audio_port gains[] table. */
+ int index;
+
+ /** Mode requested for this command. Bitfield indexed by AudioGainMode. */
+ int mode;
+
+ /**
+ * Channels to which the gain values apply. N/A in joint mode.
+ * Interpreted as audio_channel_mask_t.
+ */
+ int channelMask;
+
+ /**
+ * Gain values in millibels.
+ * For each channel ordered from LSb to MSb in channel mask. The number of values is 1 in joint
+ * mode, otherwise equals the number of bits implied by channelMask.
+ */
+ int[] values;
+
+ /** Ramp duration in ms. */
+ int rampDurationMs;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioGainMode.aidl b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
new file mode 100644
index 0000000..39395e5
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioGainMode.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioGainMode {
+ JOINT = 0,
+ CHANNELS = 1,
+ RAMP = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
new file mode 100644
index 0000000..8f517e7
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioInputFlags.aidl
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioInputFlags {
+ FAST = 0,
+ HW_HOTWORD = 1,
+ RAW = 2,
+ SYNC = 3,
+ MMAP_NOIRQ = 4,
+ VOIP_TX = 5,
+ HW_AV_SYNC = 6,
+ DIRECT = 7,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
new file mode 100644
index 0000000..d5f23a1
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoConfigEvent.aidl
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+@Backing(type="int")
+enum AudioIoConfigEvent {
+ OUTPUT_REGISTERED = 0,
+ OUTPUT_OPENED = 1,
+ OUTPUT_CLOSED = 2,
+ OUTPUT_CONFIG_CHANGED = 3,
+ INPUT_REGISTERED = 4,
+ INPUT_OPENED = 5,
+ INPUT_CLOSED = 6,
+ INPUT_CONFIG_CHANGED = 7,
+ CLIENT_STARTED = 8,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
new file mode 100644
index 0000000..876ef9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoDescriptor.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPatch;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioIoDescriptor {
+ /** Interpreted as audio_io_handle_t. */
+ int ioHandle;
+ AudioPatch patch;
+ int samplingRate;
+ AudioFormat format;
+ /** Interpreted as audio_channel_mask_t. */
+ int channelMask;
+ long frameCount;
+ long frameCountHAL;
+ /** Only valid for output. */
+ int latency;
+ /**
+ * Interpreted as audio_port_handle_t.
+ * Valid for event AUDIO_CLIENT_STARTED.
+ */
+ int portId;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
new file mode 100644
index 0000000..1fe2acc
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioIoFlags.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+// TODO(b/150948558): This should be a union. In the meantime, we require
+// that exactly one of the below arrays has a single element and the rest
+// are empty.
+parcelable AudioIoFlags {
+ /** Bitmask indexed by AudioInputFlags. */
+ int[] input;
+ /** Bitmask indexed by AudioOutputFlags. */
+ int[] output;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
new file mode 100644
index 0000000..aebf871
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioOutputFlags.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioOutputFlags {
+ DIRECT = 0,
+ PRIMARY = 1,
+ FAST = 2,
+ DEEP_BUFFER = 3,
+ COMPRESS_OFFLOAD = 4,
+ NON_BLOCKING = 5,
+ HW_AV_SYNC = 6,
+ TTS = 7,
+ RAW = 8,
+ SYNC = 9,
+ IEC958_NONAUDIO = 10,
+ DIRECT_PCM = 11,
+ MMAP_NOIRQ = 12,
+ VOIP_RX = 13,
+ INCALL_MUSIC = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPatch.aidl b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
new file mode 100644
index 0000000..8519faf
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPatch.aidl
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfig;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPatch {
+ /**
+ * Patch unique ID.
+ * Interpreted as audio_patch_handle_t.
+ */
+ int id;
+ AudioPortConfig[] sources;
+ AudioPortConfig[] sinks;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
new file mode 100644
index 0000000..2dd30a4
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfig.aidl
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioGainConfig;
+import android.media.AudioIoFlags;
+import android.media.AudioPortConfigExt;
+import android.media.AudioPortConfigType;
+import android.media.AudioPortRole;
+import android.media.AudioPortType;
+import android.media.audio.common.AudioFormat;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfig {
+ /**
+ * Port unique ID.
+ * Interpreted as audio_port_handle_t.
+ */
+ int id;
+ /** Sink or source. */
+ AudioPortRole role;
+ /** Device, mix ... */
+ AudioPortType type;
+ /** Bitmask, indexed by AudioPortConfigType. */
+ int configMask;
+ /** Sampling rate in Hz. */
+ int sampleRate;
+ /**
+ * Channel mask, if applicable.
+ * Interpreted as audio_channel_mask_t.
+ * TODO: bitmask?
+ */
+ int channelMask;
+ /**
+ * Format, if applicable.
+ */
+ AudioFormat format;
+ /** Gain to apply, if applicable. */
+ AudioGainConfig gain;
+ /** Framework only: HW_AV_SYNC, DIRECT, ... */
+ AudioIoFlags flags;
+ AudioPortConfigExt ext;
+}
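Note: configMask is documented above as a bitmask indexed by AudioPortConfigType. A small sketch of one plausible way to build it, assuming bit N corresponds to enum value N (which matches the legacy AUDIO_PORT_CONFIG_* flag values); the authoritative mapping is the config-mask conversion declared in AidlConversion.h later in this change, and the helper below is hypothetical.

#include <cstdint>
#include <initializer_list>

#include <android/media/AudioPortConfigType.h>

namespace {

// Hypothetical helper: one bit per configured field, with the bit position
// taken from the AudioPortConfigType value (SAMPLE_RATE = 0, FORMAT = 2, ...).
int32_t makeConfigMask(std::initializer_list<android::media::AudioPortConfigType> fields) {
    int32_t mask = 0;
    for (const auto field : fields) {
        mask |= 1 << static_cast<int32_t>(field);
    }
    return mask;
}

}  // namespace

// Usage sketch (portConfig is a hypothetical android::media::AudioPortConfig):
//   portConfig.configMask = makeConfigMask(
//           {android::media::AudioPortConfigType::SAMPLE_RATE,
//            android::media::AudioPortConfigType::FORMAT});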
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
new file mode 100644
index 0000000..a99aa9b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigDeviceExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigDeviceExt {
+ /**
+ * Module the device is attached to.
+ * Interpreted as audio_module_handle_t.
+ */
+ int hwModule;
+ /**
+ * Device type (e.g. AUDIO_DEVICE_OUT_SPEAKER).
+ * Interpreted as audio_devices_t.
+ * TODO: Convert to a standalone AIDL representation.
+ */
+ int type;
+ /** Device address. "" if N/A. */
+ @utf8InCpp String address;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
new file mode 100644
index 0000000..83e985e
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigExt.aidl
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigDeviceExt;
+import android.media.AudioPortConfigMixExt;
+import android.media.AudioPortConfigSessionExt;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigExt {
+ // TODO(b/150948558): This should be a union. In the meantime, we require
+ // that exactly one of the below arrays has a single element and the rest
+ // are empty.
+
+ /** Device specific info. */
+ AudioPortConfigDeviceExt[] device;
+ /** Mix specific info. */
+ AudioPortConfigMixExt[] mix;
+ /** Session specific info. */
+ AudioPortConfigSessionExt[] session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
new file mode 100644
index 0000000..d3226f2
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExt.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioPortConfigMixExtUseCase;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigMixExt {
+ /**
+ * Module the stream is attached to.
+ * Interpreted as audio_module_handle_t.
+ */
+ int hwModule;
+ /**
+ * I/O handle of the input/output stream.
+ * Interpreted as audio_io_handle_t.
+ */
+ int handle;
+ AudioPortConfigMixExtUseCase usecase;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
new file mode 100644
index 0000000..675daf8
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigMixExtUseCase.aidl
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSourceType;
+import android.media.AudioStreamType;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigMixExtUseCase {
+ // TODO(b/150948558): This should be a union. In the meantime, we require
+ // that exactly one of the below arrays has a single element and the rest
+ // are empty.
+
+ /** This is to be set if the containing config has the AudioPortRole::SOURCE role. */
+ AudioStreamType[] stream;
+ /** This is to be set if the containing config has the AudioPortRole::SINK role. */
+ AudioSourceType[] source;
+}
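Note: which arm of this interim union applies depends on the role of the enclosing mix port, which is also why the mix-ext conversion helpers introduced later in this change take a role argument. A hedged sketch with illustrative default values, assuming the standard AIDL C++ backend:

#include <android/media/AudioPortConfigMixExtUseCase.h>
#include <android/media/AudioPortRole.h>
#include <android/media/AudioSourceType.h>
#include <android/media/AudioStreamType.h>

namespace {

using android::media::AudioPortConfigMixExtUseCase;
using android::media::AudioPortRole;
using android::media::AudioSourceType;
using android::media::AudioStreamType;

// Illustrative only: pick the union arm from the enclosing port's role.
AudioPortConfigMixExtUseCase makeUseCase(AudioPortRole role) {
    AudioPortConfigMixExtUseCase useCase;
    if (role == AudioPortRole::SOURCE) {
        useCase.stream = {AudioStreamType::MUSIC};   // playback mix
    } else if (role == AudioPortRole::SINK) {
        useCase.source = {AudioSourceType::MIC};     // capture mix
    }
    // For AudioPortRole::NONE both arms are left empty.
    return useCase;
}

}  // namespace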
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
new file mode 100644
index 0000000..d3261d9
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigSessionExt.aidl
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSessionType;
+
+/**
+ * {@hide}
+ */
+parcelable AudioPortConfigSessionExt {
+ AudioSessionType session;
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
new file mode 100644
index 0000000..c7bb4d8
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortConfigType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortConfigType {
+ SAMPLE_RATE = 0,
+ CHANNEL_MASK = 1,
+ FORMAT = 2,
+ GAIN = 3,
+ FLAGS = 4,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortRole.aidl b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
new file mode 100644
index 0000000..3212325
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortRole.aidl
@@ -0,0 +1,23 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortRole {
+ NONE = 0,
+ SOURCE = 1,
+ SINK = 2,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPortType.aidl b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
new file mode 100644
index 0000000..90eea9a
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioPortType.aidl
@@ -0,0 +1,24 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioPortType {
+ NONE = 0,
+ DEVICE = 1,
+ MIX = 2,
+ SESSION = 3,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioSessionType.aidl b/media/libaudioclient/aidl/android/media/AudioSessionType.aidl
new file mode 100644
index 0000000..d305c29
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioSessionType.aidl
@@ -0,0 +1,25 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioSessionType {
+ DEVICE = -2,
+ OUTPUT_STAGE = -1,
+ OUTPUT_MIX = 0,
+ ALLOCATE = 0,
+ NONE = 0,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioSourceType.aidl b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
new file mode 100644
index 0000000..f6ecc46
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioSourceType.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioSourceType {
+ DEFAULT = 0,
+ MIC = 1,
+ VOICE_UPLINK = 2,
+ VOICE_DOWNLINK = 3,
+ VOICE_CALL = 4,
+ CAMCORDER = 5,
+ VOICE_RECOGNITION = 6,
+ VOICE_COMMUNICATION = 7,
+ REMOTE_SUBMIX = 8,
+ UNPROCESSED = 9,
+ VOICE_PERFORMANCE = 10,
+ ECHO_REFERENCE = 1997,
+ FM_TUNER = 1998,
+ /**
+ * A low-priority, preemptible audio source for background software
+ * hotword detection. Same tuning as VOICE_RECOGNITION.
+ * Used only internally by the framework.
+ */
+ HOTWORD = 1999,
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioStreamType.aidl b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
new file mode 100644
index 0000000..803b87b
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/AudioStreamType.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+@Backing(type="int")
+enum AudioStreamType {
+ DEFAULT = -1,
+ VOICE_CALL = 0,
+ SYSTEM = 1,
+ RING = 2,
+ MUSIC = 3,
+ ALARM = 4,
+ NOTIFICATION = 5,
+ BLUETOOTH_SCO = 6,
+ ENFORCED_AUDIBLE = 7,
+ DTMF = 8,
+ TTS = 9,
+ ACCESSIBILITY = 10,
+ ASSISTANT = 11,
+ /** For dynamic policy output mixes. Only used by the audio policy. */
+ REROUTING = 12,
+ /** For volume of AudioFlinger internal tracks. Only used by audioflinger. */
+ PATCH = 13,
+ /** Stream corresponding to AUDIO_USAGE_CALL_ASSISTANT. */
+ CALL_ASSISTANT = 14,
+}
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
new file mode 100644
index 0000000..421c31c
--- /dev/null
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerClient.aidl
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioIoConfigEvent;
+import android.media.AudioIoDescriptor;
+
+/**
+ * A callback interface for AudioFlinger.
+ *
+ * {@hide}
+ */
+interface IAudioFlingerClient {
+ oneway void ioConfigChanged(AudioIoConfigEvent event,
+ in AudioIoDescriptor ioDesc);
+}
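Note: a minimal native implementation sketch of the generated BnAudioFlingerClient, in the spirit of the AudioSystem::AudioFlingerClient rework further down in this change. The class name is hypothetical, and the sketch assumes the standard AIDL C++ backend (binder::Status return type, generated Bn base class).

#include <android/media/AudioIoConfigEvent.h>
#include <android/media/AudioIoDescriptor.h>
#include <android/media/BnAudioFlingerClient.h>

namespace {

// Hypothetical listener; the in-tree client is AudioSystem::AudioFlingerClient.
class SampleAudioFlingerClient : public android::media::BnAudioFlingerClient {
  public:
    android::binder::Status ioConfigChanged(
            android::media::AudioIoConfigEvent event,
            const android::media::AudioIoDescriptor& ioDesc) override {
        // ioConfigChanged is oneway, so there is no reply to fill in;
        // a real client would update its cached I/O configuration here.
        (void)event;
        (void)ioDesc;
        return android::binder::Status::ok();
    }
};

}  // namespace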
diff --git a/media/libaudioclient/include/media/AidlConversion.h b/media/libaudioclient/include/media/AidlConversion.h
new file mode 100644
index 0000000..a1b9b82
--- /dev/null
+++ b/media/libaudioclient/include/media/AidlConversion.h
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <system/audio.h>
+
+#include <android-base/result.h>
+#include <android/media/AudioGainMode.h>
+#include <android/media/AudioInputFlags.h>
+#include <android/media/AudioIoConfigEvent.h>
+#include <android/media/AudioIoDescriptor.h>
+#include <android/media/AudioOutputFlags.h>
+#include <android/media/AudioPortConfigType.h>
+
+#include <media/AudioIoDescriptor.h>
+
+namespace android {
+
+template <typename T>
+using ConversionResult = base::expected<T, status_t>;
+
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<int> aidl2legacy_AudioPortConfigType(media::AudioPortConfigType aidl);
+// The legacy enum is unnamed. Thus, we use int.
+ConversionResult<media::AudioPortConfigType> legacy2aidl_AudioPortConfigType(int legacy);
+
+ConversionResult<unsigned int> aidl2legacy_int32_t_config_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_config_mask_int32_t(unsigned int legacy);
+
+ConversionResult<audio_channel_mask_t> aidl2legacy_int32_t_audio_channel_mask_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_channel_mask_t_int32_t(audio_channel_mask_t legacy);
+
+ConversionResult<audio_io_config_event> aidl2legacy_AudioIoConfigEvent_audio_io_config_event(
+ media::AudioIoConfigEvent aidl);
+ConversionResult<media::AudioIoConfigEvent> legacy2aidl_audio_io_config_event_AudioIoConfigEvent(
+ audio_io_config_event legacy);
+
+ConversionResult<audio_port_role_t> aidl2legacy_AudioPortRole_audio_port_role_t(
+ media::AudioPortRole aidl);
+ConversionResult<media::AudioPortRole> legacy2aidl_audio_port_role_t_AudioPortRole(
+ audio_port_role_t legacy);
+
+ConversionResult<audio_port_type_t> aidl2legacy_AudioPortType_audio_port_type_t(
+ media::AudioPortType aidl);
+ConversionResult<media::AudioPortType> legacy2aidl_audio_port_type_t_AudioPortType(
+ audio_port_type_t legacy);
+
+ConversionResult<audio_format_t> aidl2legacy_AudioFormat_audio_format_t(
+ media::audio::common::AudioFormat aidl);
+ConversionResult<media::audio::common::AudioFormat> legacy2aidl_audio_format_t_AudioFormat(
+ audio_format_t legacy);
+
+ConversionResult<int> aidl2legacy_AudioGainMode_int(media::AudioGainMode aidl);
+ConversionResult<media::AudioGainMode> legacy2aidl_int_AudioGainMode(int legacy);
+
+ConversionResult<audio_gain_mode_t> aidl2legacy_int32_t_audio_gain_mode_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_gain_mode_t_int32_t(audio_gain_mode_t legacy);
+
+ConversionResult<audio_devices_t> aidl2legacy_int32_t_audio_devices_t(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_devices_t_int32_t(audio_devices_t legacy);
+
+ConversionResult<audio_gain_config> aidl2legacy_AudioGainConfig_audio_gain_config(
+ const media::AudioGainConfig& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioGainConfig> legacy2aidl_audio_gain_config_AudioGainConfig(
+ const audio_gain_config& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_AudioInputFlags_audio_input_flags_t(
+ media::AudioInputFlags aidl);
+ConversionResult<media::AudioInputFlags> legacy2aidl_audio_input_flags_t_AudioInputFlags(
+ audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_AudioOutputFlags_audio_output_flags_t(
+ media::AudioOutputFlags aidl);
+ConversionResult<media::AudioOutputFlags> legacy2aidl_audio_output_flags_t_AudioOutputFlags(
+ audio_output_flags_t legacy);
+
+ConversionResult<audio_input_flags_t> aidl2legacy_audio_input_flags_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_input_flags_mask(audio_input_flags_t legacy);
+
+ConversionResult<audio_output_flags_t> aidl2legacy_audio_output_flags_mask(int32_t aidl);
+ConversionResult<int32_t> legacy2aidl_audio_output_flags_mask(audio_output_flags_t legacy);
+
+ConversionResult<audio_io_flags> aidl2legacy_AudioIoFlags_audio_io_flags(
+ const media::AudioIoFlags& aidl, media::AudioPortRole role, media::AudioPortType type);
+ConversionResult<media::AudioIoFlags> legacy2aidl_audio_io_flags_AudioIoFlags(
+ const audio_io_flags& legacy, audio_port_role_t role, audio_port_type_t type);
+
+ConversionResult<audio_port_config_device_ext> aidl2legacy_AudioPortConfigDeviceExt(
+ const media::AudioPortConfigDeviceExt& aidl);
+ConversionResult<media::AudioPortConfigDeviceExt> legacy2aidl_AudioPortConfigDeviceExt(
+ const audio_port_config_device_ext& legacy);
+
+ConversionResult<audio_stream_type_t> aidl2legacy_AudioStreamType_audio_stream_type_t(
+ media::AudioStreamType aidl);
+ConversionResult<media::AudioStreamType> legacy2aidl_audio_stream_type_t_AudioStreamType(
+ audio_stream_type_t legacy);
+
+ConversionResult<audio_source_t> aidl2legacy_AudioSourceType_audio_source_t(
+ media::AudioSourceType aidl);
+ConversionResult<media::AudioSourceType> legacy2aidl_audio_source_t_AudioSourceType(
+ audio_source_t legacy);
+
+ConversionResult<audio_session_t> aidl2legacy_AudioSessionType_audio_session_t(
+ media::AudioSessionType aidl);
+ConversionResult<media::AudioSessionType> legacy2aidl_audio_session_t_AudioSessionType(
+ audio_session_t legacy);
+
+ConversionResult<audio_port_config_mix_ext> aidl2legacy_AudioPortConfigMixExt(
+ const media::AudioPortConfigMixExt& aidl, media::AudioPortRole role);
+ConversionResult<media::AudioPortConfigMixExt> legacy2aidl_AudioPortConfigMixExt(
+ const audio_port_config_mix_ext& legacy, audio_port_role_t role);
+
+ConversionResult<audio_port_config_session_ext> aidl2legacy_AudioPortConfigSessionExt(
+ const media::AudioPortConfigSessionExt& aidl);
+ConversionResult<media::AudioPortConfigSessionExt> legacy2aidl_AudioPortConfigSessionExt(
+ const audio_port_config_session_ext& legacy);
+
+ConversionResult<audio_port_config> aidl2legacy_AudioPortConfig_audio_port_config(
+ const media::AudioPortConfig& aidl);
+ConversionResult<media::AudioPortConfig> legacy2aidl_audio_port_config_AudioPortConfig(
+ const audio_port_config& legacy);
+
+ConversionResult<struct audio_patch> aidl2legacy_AudioPatch_audio_patch(
+ const media::AudioPatch& aidl);
+ConversionResult<media::AudioPatch> legacy2aidl_audio_patch_AudioPatch(
+ const struct audio_patch& legacy);
+
+ConversionResult<sp<AudioIoDescriptor>> aidl2legacy_AudioIoDescriptor_AudioIoDescriptor(
+ const media::AudioIoDescriptor& aidl);
+ConversionResult<media::AudioIoDescriptor> legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(
+ const sp<AudioIoDescriptor>& legacy);
+
+} // namespace android
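Note: a short usage sketch for these conversion helpers, assuming the has_value()/value()/error() accessors of android::base::expected; the wrapper function below is illustrative and not part of the patch.

#include <media/AidlConversion.h>

namespace android {

// Illustrative only: convert an AIDL stream type to the legacy enum,
// falling back to a default when the conversion fails.
audio_stream_type_t streamTypeOrDefault(media::AudioStreamType aidl) {
    ConversionResult<audio_stream_type_t> result =
            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl);
    if (!result.has_value()) {
        return AUDIO_STREAM_DEFAULT;  // result.error() carries the status_t
    }
    return result.value();
}

}  // namespace android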
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 848743a..dfc1982 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -19,12 +19,12 @@
#include <sys/types.h>
+#include <android/media/BnAudioFlingerClient.h>
#include <media/AudioDeviceTypeAddr.h>
#include <media/AudioPolicy.h>
#include <media/AudioProductStrategy.h>
#include <media/AudioVolumeGroup.h>
#include <media/AudioIoDescriptor.h>
-#include <media/IAudioFlingerClient.h>
#include <media/IAudioPolicyServiceClient.h>
#include <media/MicrophoneInfo.h>
#include <set>
@@ -531,7 +531,7 @@
private:
- class AudioFlingerClient: public IBinder::DeathRecipient, public BnAudioFlingerClient
+ class AudioFlingerClient: public IBinder::DeathRecipient, public media::BnAudioFlingerClient
{
public:
AudioFlingerClient() :
@@ -551,9 +551,9 @@
// indicate a change in the configuration of an output or input: keeps the cached
// values for output/input parameters up-to-date in client process
- virtual void ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc);
-
+ binder::Status ioConfigChanged(
+ media::AudioIoConfigEvent event,
+ const media::AudioIoDescriptor& ioDesc) override;
status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
audio_io_handle_t audioIo,
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index a01b681..413db71 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -29,7 +29,6 @@
#include <media/AudioClient.h>
#include <media/DeviceDescriptorBase.h>
#include <media/IAudioTrack.h>
-#include <media/IAudioFlingerClient.h>
#include <system/audio.h>
#include <system/audio_effect.h>
#include <system/audio_policy.h>
@@ -39,6 +38,7 @@
#include <vector>
#include "android/media/IAudioRecord.h"
+#include "android/media/IAudioFlingerClient.h"
#include "android/media/IAudioTrackCallback.h"
#include "android/media/IEffect.h"
#include "android/media/IEffectClient.h"
@@ -420,7 +420,7 @@
// Register an object to receive audio input/output change and track notifications.
// For a given calling pid, AudioFlinger disregards any registrations after the first.
// Thus the IAudioFlingerClient must be a singleton per process.
- virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0;
+ virtual void registerClient(const sp<media::IAudioFlingerClient>& client) = 0;
// retrieve the audio recording buffer size in bytes
// FIXME This API assumes a route, and so should be deprecated.
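Note: a sketch of honoring the one-registration-per-process contract described in the registerClient comment above. The once-guard is illustrative; inside the framework the registration is performed by AudioSystem, and the AudioFlinger handle would come from the usual lookup (for example AudioSystem::get_audio_flinger()).

#include <mutex>

#include <media/IAudioFlinger.h>

namespace {

// Illustrative only: register exactly one IAudioFlingerClient per process.
void registerClientOnce(const android::sp<android::IAudioFlinger>& audioFlinger,
                        const android::sp<android::media::IAudioFlingerClient>& client) {
    static std::once_flag sRegisteredOnce;
    std::call_once(sRegisteredOnce, [&] { audioFlinger->registerClient(client); });
}

}  // namespace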
diff --git a/media/libaudioclient/include/media/IAudioFlingerClient.h b/media/libaudioclient/include/media/IAudioFlingerClient.h
deleted file mode 100644
index 0080bc9..0000000
--- a/media/libaudioclient/include/media/IAudioFlingerClient.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_IAUDIOFLINGERCLIENT_H
-#define ANDROID_IAUDIOFLINGERCLIENT_H
-
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <utils/KeyedVector.h>
-#include <system/audio.h>
-#include <media/AudioIoDescriptor.h>
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-class IAudioFlingerClient : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioFlingerClient);
-
- // Notifies a change of audio input/output configuration.
- virtual void ioConfigChanged(audio_io_config_event event,
- const sp<AudioIoDescriptor>& ioDesc) = 0;
-
-};
-
-
-// ----------------------------------------------------------------------------
-
-class BnAudioFlingerClient : public BnInterface<IAudioFlingerClient>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
-#endif // ANDROID_IAUDIOFLINGERCLIENT_H
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 26eaaf8..1696233 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -138,7 +138,7 @@
template <>
bool stringToStreamType(const char *streamName, audio_devices_t* type) {
- return deviceFromString(streamName, *type);
+ return DeviceConverter::fromString(streamName, *type);
}
/** Parse a library xml note and push the result in libraries or return false on failure. */
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index c08d187..8a4b17c 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -974,7 +974,7 @@
case PREPARE_DRM: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- uint8_t uuid[16];
+ uint8_t uuid[16] = {};
data.read(uuid, sizeof(uuid));
Vector<uint8_t> drmSessionId;
readVector(data, drmSessionId);
diff --git a/media/libmediahelper/TEST_MAPPING b/media/libmediahelper/TEST_MAPPING
new file mode 100644
index 0000000..f9594bd
--- /dev/null
+++ b/media/libmediahelper/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "libmedia_helper_tests"
+ }
+ ]
+}
diff --git a/media/libmediahelper/TypeConverter.cpp b/media/libmediahelper/TypeConverter.cpp
index 876dc45..d3a517f 100644
--- a/media/libmediahelper/TypeConverter.cpp
+++ b/media/libmediahelper/TypeConverter.cpp
@@ -18,315 +18,9 @@
namespace android {
-#define MAKE_STRING_FROM_ENUM(string) { #string, string }
+#define MAKE_STRING_FROM_ENUM(enumval) { #enumval, enumval }
#define TERMINATOR { .literal = nullptr }
-template <>
-const OutputDeviceConverter::Table OutputDeviceConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_EARPIECE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
- // TODO(mnaganov): Remove from here, use 'audio_is_bluetooth_out_sco_device' function.
- { "AUDIO_DEVICE_OUT_ALL_SCO", static_cast<audio_devices_t>(AUDIO_DEVICE_OUT_ALL_SCO) },
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
- // TODO(mnaganov): Remove from here, use 'audio_is_a2dp_out_device' function.
- { "AUDIO_DEVICE_OUT_ALL_A2DP", static_cast<audio_devices_t>(AUDIO_DEVICE_OUT_ALL_A2DP) },
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE),
- // TODO(mnaganov): Remove from here, use 'audio_is_usb_out_device' function.
- { "AUDIO_DEVICE_OUT_ALL_USB", static_cast<audio_devices_t>(AUDIO_DEVICE_OUT_ALL_USB) },
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_FM),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_AUX_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_PROXY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_USB_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_HEARING_AID),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_ECHO_CANCELLER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLE_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_BLE_SPEAKER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_DEFAULT),
- // STUB must be after DEFAULT, so the latter is picked up by toString first.
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_OUT_STUB),
- TERMINATOR
-};
-
-template <>
-const InputDeviceConverter::Table InputDeviceConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AMBIENT),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET),
- // TODO(mnaganov): Remove from here, use 'audio_is_bluetooth_in_sco_device' function.
- { "AUDIO_DEVICE_IN_ALL_SCO", static_cast<audio_devices_t>(AUDIO_DEVICE_IN_ALL_SCO) },
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_HDMI_ARC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BACK_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_DEVICE),
- // TODO(mnaganov): Remove from here, use 'audio_is_usb_in_device' function.
- { "AUDIO_DEVICE_IN_ALL_USB", static_cast<audio_devices_t>(AUDIO_DEVICE_IN_ALL_USB) },
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_FM_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_TV_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LINE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_SPDIF),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_LOOPBACK),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_IP),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BUS),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_PROXY),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_USB_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_BLE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_ECHO_REFERENCE),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_BLE_HEADSET),
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_DEFAULT),
- // STUB must be after DEFAULT, so the latter is picked up by toString first.
- MAKE_STRING_FROM_ENUM(AUDIO_DEVICE_IN_STUB),
- TERMINATOR
-};
-
-
-template <>
-const OutputFlagConverter::Table OutputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_DIRECT_PCM),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_VOIP_RX),
- MAKE_STRING_FROM_ENUM(AUDIO_OUTPUT_FLAG_INCALL_MUSIC),
- TERMINATOR
-};
-
-
-template <>
-const InputFlagConverter::Table InputFlagConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_NONE),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_FAST),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_RAW),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_MMAP_NOIRQ),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_VOIP_TX),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_HW_AV_SYNC),
- MAKE_STRING_FROM_ENUM(AUDIO_INPUT_FLAG_DIRECT),
- TERMINATOR
-};
-
-
-template <>
-const FormatConverter::Table FormatConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_16_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_32_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_8_24_BIT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_FLOAT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_NB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_MAIN),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SSR),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LTP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_SCALABLE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ERLC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ELD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_XHE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_MAIN),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SSR),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LTP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_SCALABLE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ERLC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_LD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_ELD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS_XHE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_VORBIS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_HE_AAC_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_OPUS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DTS_HD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_IEC61937),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DOLBY_TRUEHD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCWB),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_EVRCNW),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADIF),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_WMA_PRO),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AMR_WB_PLUS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MP2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_QCELP),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_DSD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_FLAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_ALAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_ADTS),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_SBC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_HD),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AC4),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LDAC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_E_AC3_JOC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_1_0),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_0),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_MAT_2_1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_LC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V1),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_AAC_LATM_HE_V2),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_CELT),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
- MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_TWSP),
- TERMINATOR
-};
-
-
-template <>
-const OutputChannelConverter::Table OutputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_2POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_TRI_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_3POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_QUAD_SIDE),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_SURROUND),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_PENTA),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1_SIDE),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_5POINT1POINT4),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_6POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_7POINT1POINT4),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_A),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_HAPTIC_AB),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_MONO_HAPTIC_AB),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB),
- TERMINATOR
-};
-
-
-template <>
-const InputChannelConverter::Table InputChannelConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_STEREO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_6),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_2POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT0POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_3POINT1POINT2),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_5POINT1),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO),
- MAKE_STRING_FROM_ENUM(AUDIO_CHANNEL_IN_VOICE_CALL_MONO),
- TERMINATOR
-};
-
-template <>
-const ChannelIndexConverter::Table ChannelIndexConverter::mTable[] = {
- {"AUDIO_CHANNEL_INDEX_MASK_1", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_1)},
- {"AUDIO_CHANNEL_INDEX_MASK_2", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_2)},
- {"AUDIO_CHANNEL_INDEX_MASK_3", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_3)},
- {"AUDIO_CHANNEL_INDEX_MASK_4", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_4)},
- {"AUDIO_CHANNEL_INDEX_MASK_5", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_5)},
- {"AUDIO_CHANNEL_INDEX_MASK_6", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_6)},
- {"AUDIO_CHANNEL_INDEX_MASK_7", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_7)},
- {"AUDIO_CHANNEL_INDEX_MASK_8", static_cast<audio_channel_mask_t>(AUDIO_CHANNEL_INDEX_MASK_8)},
- TERMINATOR
-};
-
-
-template <>
-const GainModeConverter::Table GainModeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_JOINT),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_CHANNELS),
- MAKE_STRING_FROM_ENUM(AUDIO_GAIN_MODE_RAMP),
- TERMINATOR
-};
-
-
-template <>
-const StreamTypeConverter::Table StreamTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DEFAULT),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_SYSTEM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_RING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_MUSIC),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ALARM),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_NOTIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_BLUETOOTH_SCO ),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ENFORCED_AUDIBLE),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_DTMF),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_TTS),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ACCESSIBILITY),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_REROUTING),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_PATCH),
- MAKE_STRING_FROM_ENUM(AUDIO_STREAM_CALL_ASSISTANT),
- TERMINATOR
-};
-
template<>
const AudioModeConverter::Table AudioModeConverter::mTable[] = {
MAKE_STRING_FROM_ENUM(AUDIO_MODE_INVALID),
@@ -339,62 +33,6 @@
TERMINATOR
};
-template<>
-const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_UNKNOWN),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SPEECH),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MUSIC),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_MOVIE),
- MAKE_STRING_FROM_ENUM(AUDIO_CONTENT_TYPE_SONIFICATION),
- TERMINATOR
-};
-
-template <>
-const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MEDIA),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ALARM),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_EVENT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_GAME),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CALL_ASSISTANT),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_EMERGENCY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_SAFETY),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VEHICLE_STATUS),
- MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ANNOUNCEMENT),
- TERMINATOR
-};
-
-template <>
-const SourceTypeConverter::Table SourceTypeConverter::mTable[] = {
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_DEFAULT),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MIC),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_UPLINK),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_DOWNLINK),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_CALL),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CAMCORDER),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_RECOGNITION),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_COMMUNICATION),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_REMOTE_SUBMIX),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_UNPROCESSED),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_PERFORMANCE),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_ECHO_REFERENCE),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_FM_TUNER),
- MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_HOTWORD),
- TERMINATOR
-};
-
template <>
const AudioFlagConverter::Table AudioFlagConverter::mTable[] = {
MAKE_STRING_FROM_ENUM(AUDIO_FLAG_NONE),
@@ -417,6 +55,7 @@
template class TypeConverter<OutputDeviceTraits>;
template class TypeConverter<InputDeviceTraits>;
+template class TypeConverter<DeviceTraits>;
template class TypeConverter<OutputFlagTraits>;
template class TypeConverter<InputFlagTraits>;
template class TypeConverter<FormatTraits>;
@@ -430,11 +69,6 @@
template class TypeConverter<SourceTraits>;
template class TypeConverter<AudioFlagTraits>;
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
- return InputDeviceConverter::fromString(literalDevice, device) ||
- OutputDeviceConverter::fromString(literalDevice, device);
-}
-
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del)
{
@@ -454,21 +88,20 @@
audio_format_t formatFromString(const std::string &literalFormat, audio_format_t defaultFormat)
{
audio_format_t format;
- if (literalFormat.empty()) {
- return defaultFormat;
+ if (!literalFormat.empty() && FormatConverter::fromString(literalFormat, format)) {
+ return format;
}
- FormatConverter::fromString(literalFormat, format);
- return format;
+ return defaultFormat;
}
audio_channel_mask_t channelMaskFromString(const std::string &literalChannels)
{
audio_channel_mask_t channels;
- if (!OutputChannelConverter::fromString(literalChannels, channels) &&
- !InputChannelConverter::fromString(literalChannels, channels)) {
- return AUDIO_CHANNEL_INVALID;
+ if (!literalChannels.empty() &&
+ audio_channel_mask_from_string(literalChannels.c_str(), &channels)) {
+ return channels;
}
- return channels;
+ return AUDIO_CHANNEL_INVALID;
}
ChannelTraits::Collection channelMasksFromString(
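Note: with the rework above, the fallback behavior of these helpers is explicit: an empty or unrecognized string yields the caller-supplied default for formats, and AUDIO_CHANNEL_INVALID for channel masks. A tiny illustrative sketch; the string literals are just sample inputs, one of which is a valid enum name.

#include <media/TypeConverter.h>

namespace {

void typeConverterFallbackExamples() {
    // Recognized literal: parsed into the matching enum value.
    const audio_format_t pcm16 =
            android::formatFromString("AUDIO_FORMAT_PCM_16_BIT", AUDIO_FORMAT_DEFAULT);
    // Empty or unknown literal: the provided default is returned instead.
    const audio_format_t fallback =
            android::formatFromString("not-a-format", AUDIO_FORMAT_PCM_16_BIT);
    // Unknown channel mask string: AUDIO_CHANNEL_INVALID.
    const audio_channel_mask_t invalid =
            android::channelMaskFromString("not-a-channel-mask");
    (void)pcm16; (void)fallback; (void)invalid;
}

}  // namespace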
diff --git a/media/libmediahelper/include/media/TypeConverter.h b/media/libmediahelper/include/media/TypeConverter.h
index 011498a..42ccb5f 100644
--- a/media/libmediahelper/include/media/TypeConverter.h
+++ b/media/libmediahelper/include/media/TypeConverter.h
@@ -24,8 +24,6 @@
#include <system/audio.h>
#include <utils/Log.h>
-#include <utils/Vector.h>
-#include <utils/SortedVector.h>
#include <media/AudioParameter.h>
#include "convert.h"
@@ -43,16 +41,6 @@
}
};
template <typename T>
-struct SortedVectorTraits
-{
- typedef T Type;
- typedef SortedVector<Type> Collection;
- static void add(Collection &collection, Type value)
- {
- collection.add(value);
- }
-};
-template <typename T>
struct SetTraits
{
typedef T Type;
@@ -108,13 +96,20 @@
typename Traits::Collection &collection,
const char *del = AudioParameter::valueListSeparator);
- static uint32_t maskFromString(
+ static typename Traits::Type maskFromString(
const std::string &str, const char *del = AudioParameter::valueListSeparator);
static void maskToString(
- uint32_t mask, std::string &str, const char *del = AudioParameter::valueListSeparator);
+ typename Traits::Type mask, std::string &str,
+ const char *del = AudioParameter::valueListSeparator);
protected:
+ // Default implementations use mTable for to/from string conversions
+ // of each individual enum value.
+ // These functions may be specialized to use external converters instead.
+ static bool toStringImpl(const typename Traits::Type &value, std::string &str);
+ static bool fromStringImpl(const std::string &str, typename Traits::Type &result);
+
struct Table {
const char *literal;
typename Traits::Type value;
@@ -124,26 +119,22 @@
};
template <class Traits>
-inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
-{
+inline bool TypeConverter<Traits>::toStringImpl(
+ const typename Traits::Type &value, std::string &str) {
for (size_t i = 0; mTable[i].literal; i++) {
if (mTable[i].value == value) {
str = mTable[i].literal;
return true;
}
}
- char result[64];
- snprintf(result, sizeof(result), "Unknown enum value %d", value);
- str = result;
return false;
}
template <class Traits>
-inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
-{
+inline bool TypeConverter<Traits>::fromStringImpl(
+ const std::string &str, typename Traits::Type &result) {
for (size_t i = 0; mTable[i].literal; i++) {
if (strcmp(mTable[i].literal, str.c_str()) == 0) {
- ALOGV("stringToEnum() found %s", mTable[i].literal);
result = mTable[i].value;
return true;
}
@@ -152,6 +143,26 @@
}
template <class Traits>
+inline bool TypeConverter<Traits>::toString(const typename Traits::Type &value, std::string &str)
+{
+ const bool success = toStringImpl(value, str);
+ if (!success) {
+ char result[64];
+ snprintf(result, sizeof(result), "Unknown enum value %d", value);
+ str = result;
+ }
+ return success;
+}
+
+template <class Traits>
+inline bool TypeConverter<Traits>::fromString(const std::string &str, typename Traits::Type &result)
+{
+ const bool success = fromStringImpl(str, result);
+ ALOGV_IF(success, "stringToEnum() found %s", str.c_str());
+ return success;
+}
+
+template <class Traits>
inline void TypeConverter<Traits>::collectionFromString(const std::string &str,
typename Traits::Collection &collection,
const char *del)
@@ -168,7 +179,8 @@
}
template <class Traits>
-inline uint32_t TypeConverter<Traits>::maskFromString(const std::string &str, const char *del)
+inline typename Traits::Type TypeConverter<Traits>::maskFromString(
+ const std::string &str, const char *del)
{
char *literal = strdup(str.c_str());
uint32_t value = 0;
@@ -179,20 +191,24 @@
}
}
free(literal);
- return value;
+ return static_cast<typename Traits::Type>(value);
}
template <class Traits>
-inline void TypeConverter<Traits>::maskToString(uint32_t mask, std::string &str, const char *del)
+inline void TypeConverter<Traits>::maskToString(
+ typename Traits::Type mask, std::string &str, const char *del)
{
if (mask != 0) {
bool first_flag = true;
- for (size_t i = 0; mTable[i].literal; i++) {
- uint32_t value = static_cast<uint32_t>(mTable[i].value);
- if (mTable[i].value != 0 && ((mask & value) == value)) {
- if (!first_flag) str += del;
- first_flag = false;
- str += mTable[i].literal;
+ for (size_t bit = 0; bit < sizeof(uint32_t) * 8; ++bit) {
+ uint32_t flag = 1u << bit;
+ if ((flag & mask) == flag) {
+ std::string flag_str;
+ if (toString(static_cast<typename Traits::Type>(flag), flag_str)) {
+ if (!first_flag) str += del;
+ first_flag = false;
+ str += flag_str;
+ }
}
}
} else {
@@ -200,6 +216,7 @@
}
}
+typedef TypeConverter<DeviceTraits> DeviceConverter;
typedef TypeConverter<OutputDeviceTraits> OutputDeviceConverter;
typedef TypeConverter<InputDeviceTraits> InputDeviceConverter;
typedef TypeConverter<OutputFlagTraits> OutputFlagConverter;
@@ -216,23 +233,227 @@
typedef TypeConverter<SourceTraits> SourceTypeConverter;
typedef TypeConverter<AudioFlagTraits> AudioFlagConverter;
-template<> const OutputDeviceConverter::Table OutputDeviceConverter::mTable[];
-template<> const InputDeviceConverter::Table InputDeviceConverter::mTable[];
-template<> const OutputFlagConverter::Table OutputFlagConverter::mTable[];
-template<> const InputFlagConverter::Table InputFlagConverter::mTable[];
-template<> const FormatConverter::Table FormatConverter::mTable[];
-template<> const OutputChannelConverter::Table OutputChannelConverter::mTable[];
-template<> const InputChannelConverter::Table InputChannelConverter::mTable[];
-template<> const ChannelIndexConverter::Table ChannelIndexConverter::mTable[];
-template<> const GainModeConverter::Table GainModeConverter::mTable[];
-template<> const StreamTypeConverter::Table StreamTypeConverter::mTable[];
template<> const AudioModeConverter::Table AudioModeConverter::mTable[];
-template<> const AudioContentTypeConverter::Table AudioContentTypeConverter::mTable[];
-template<> const UsageTypeConverter::Table UsageTypeConverter::mTable[];
-template<> const SourceTypeConverter::Table SourceTypeConverter::mTable[];
template<> const AudioFlagConverter::Table AudioFlagConverter::mTable[];
-bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
+template <>
+inline bool TypeConverter<DeviceTraits>::toStringImpl(
+ const DeviceTraits::Type &value, std::string &str) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<DeviceTraits>::fromStringImpl(
+ const std::string &str, DeviceTraits::Type &result) {
+ return audio_device_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::toStringImpl(
+ const OutputDeviceTraits::Type &value, std::string &str) {
+ if (audio_is_output_device(value)) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<OutputDeviceTraits>::fromStringImpl(
+ const std::string &str, OutputDeviceTraits::Type &result) {
+ OutputDeviceTraits::Type temp;
+ if (audio_device_from_string(str.c_str(), &temp) &&
+ audio_is_output_device(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::toStringImpl(
+ const InputDeviceTraits::Type &value, std::string &str) {
+ if (audio_is_input_device(value)) {
+ str = audio_device_to_string(value);
+ return !str.empty();
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputDeviceTraits>::fromStringImpl(
+ const std::string &str, InputDeviceTraits::Type &result) {
+ InputDeviceTraits::Type temp;
+ if (audio_device_from_string(str.c_str(), &temp) &&
+ audio_is_input_device(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::toStringImpl(
+ const audio_input_flags_t &value, std::string &str) {
+ str = audio_input_flag_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputFlagTraits>::fromStringImpl(
+ const std::string &str, audio_input_flags_t &result) {
+ return audio_input_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::toStringImpl(
+ const audio_output_flags_t &value, std::string &str) {
+ str = audio_output_flag_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputFlagTraits>::fromStringImpl(
+ const std::string &str, audio_output_flags_t &result) {
+ return audio_output_flag_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::toStringImpl(
+ const audio_format_t &value, std::string &str) {
+ str = audio_format_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<FormatTraits>::fromStringImpl(
+ const std::string &str, audio_format_t &result) {
+ return audio_format_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_out_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<OutputChannelTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ OutputChannelTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_is_output_channel(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_in_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<InputChannelTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ InputChannelTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_is_input_channel(temp)) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::toStringImpl(
+ const audio_channel_mask_t &value, std::string &str) {
+ str = audio_channel_index_mask_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<ChannelIndexTraits>::fromStringImpl(
+ const std::string &str, audio_channel_mask_t &result) {
+ ChannelIndexTraits::Type temp;
+ if (audio_channel_mask_from_string(str.c_str(), &temp) &&
+ audio_channel_mask_get_representation(temp) == AUDIO_CHANNEL_REPRESENTATION_INDEX) {
+ result = temp;
+ return true;
+ }
+ return false;
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::toStringImpl(
+ const audio_stream_type_t &value, std::string &str) {
+ str = audio_stream_type_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<StreamTraits>::fromStringImpl(
+ const std::string &str, audio_stream_type_t &result)
+{
+ return audio_stream_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::toStringImpl(
+ const audio_gain_mode_t &value, std::string &str) {
+ str = audio_gain_mode_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<GainModeTraits>::fromStringImpl(
+ const std::string &str, audio_gain_mode_t &result) {
+ return audio_gain_mode_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::toStringImpl(
+ const audio_content_type_t &value, std::string &str) {
+ str = audio_content_type_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<AudioContentTraits>::fromStringImpl(
+ const std::string &str, audio_content_type_t &result) {
+ return audio_content_type_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::toStringImpl(const audio_usage_t &value, std::string &str)
+{
+ str = audio_usage_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<UsageTraits>::fromStringImpl(
+ const std::string &str, audio_usage_t &result) {
+ return audio_usage_from_string(str.c_str(), &result);
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::toStringImpl(const audio_source_t &value, std::string &str)
+{
+ str = audio_source_to_string(value);
+ return !str.empty();
+}
+
+template <>
+inline bool TypeConverter<SourceTraits>::fromStringImpl(
+ const std::string &str, audio_source_t &result) {
+ return audio_source_from_string(str.c_str(), &result);
+}
SampleRateTraits::Collection samplingRatesFromString(
const std::string &samplingRates, const char *del = AudioParameter::valueListSeparator);
@@ -256,6 +477,7 @@
// counting enumerations
template <typename T, std::enable_if_t<std::is_same<T, audio_content_type_t>::value
+ || std::is_same<T, audio_devices_t>::value
|| std::is_same<T, audio_mode_t>::value
|| std::is_same<T, audio_source_t>::value
|| std::is_same<T, audio_stream_type_t>::value
@@ -282,17 +504,6 @@
return result;
}
-static inline std::string toString(const audio_devices_t& devices)
-{
- std::string result;
- if ((devices & AUDIO_DEVICE_BIT_IN) != 0) {
- InputDeviceConverter::maskToString(devices, result);
- } else {
- OutputDeviceConverter::maskToString(devices, result);
- }
- return result;
-}
-
static inline std::string toString(const audio_attributes_t& attributes)
{
std::ostringstream result;
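
Editor's note: the TypeConverter change above splits each conversion into a pure table lookup (toStringImpl/fromStringImpl, which only report success or failure) and thin public wrappers that keep the legacy behavior (logging plus the "Unknown enum value" fallback), so the new specializations can delegate to the audio_*_to_string()/audio_*_from_string() helpers without duplicating that boilerplate. A minimal standalone sketch of the split, using made-up Fruit/FruitTraits types rather than the real traits (illustration only, not part of the change):

#include <cstddef>
#include <cstdio>
#include <string>

enum Fruit : int { FRUIT_APPLE = 1, FRUIT_PEAR = 2 };
struct FruitTraits { typedef Fruit Type; };

template <class Traits>
struct Converter {
    struct Table { const char *literal; typename Traits::Type value; };
    static const Table mTable[];

    // Pure table lookup: reports failure, produces no fallback text.
    static bool toStringImpl(const typename Traits::Type &value, std::string &str) {
        for (size_t i = 0; mTable[i].literal; i++) {
            if (mTable[i].value == value) { str = mTable[i].literal; return true; }
        }
        return false;
    }

    // Public wrapper: keeps the legacy "Unknown enum value" fallback on failure.
    static bool toString(const typename Traits::Type &value, std::string &str) {
        const bool success = toStringImpl(value, str);
        if (!success) {
            char result[64];
            snprintf(result, sizeof(result), "Unknown enum value %d", value);
            str = result;
        }
        return success;
    }
};

template <>
const Converter<FruitTraits>::Table Converter<FruitTraits>::mTable[] = {
    {"FRUIT_APPLE", FRUIT_APPLE}, {"FRUIT_PEAR", FRUIT_PEAR}, {nullptr, FRUIT_APPLE},
};

int main() {
    std::string s;
    Converter<FruitTraits>::toString(static_cast<Fruit>(42), s);
    printf("%s\n", s.c_str());  // prints "Unknown enum value 42"
    return 0;
}
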
diff --git a/media/libmediahelper/tests/Android.bp b/media/libmediahelper/tests/Android.bp
new file mode 100644
index 0000000..c5ba122
--- /dev/null
+++ b/media/libmediahelper/tests/Android.bp
@@ -0,0 +1,22 @@
+cc_test {
+ name: "libmedia_helper_tests",
+
+ generated_headers: ["audio_policy_configuration_V7_0"],
+ generated_sources: ["audio_policy_configuration_V7_0"],
+ header_libs: ["libxsdc-utils"],
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libmedia_helper",
+ "libxml2",
+ ],
+
+ srcs: ["typeconverter_tests.cpp"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/media/libmediahelper/tests/typeconverter_tests.cpp b/media/libmediahelper/tests/typeconverter_tests.cpp
new file mode 100644
index 0000000..ab55c13
--- /dev/null
+++ b/media/libmediahelper/tests/typeconverter_tests.cpp
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "TypeConverter_Test"
+#include <log/log.h>
+
+#include <audio_policy_configuration_V7_0.h>
+#include <media/TypeConverter.h>
+#include <system/audio.h>
+#include <xsdc/XsdcSupport.h>
+
+using namespace android;
+namespace xsd {
+using namespace audio::policy::configuration::V7_0;
+}
+
+TEST(TypeConverter, ParseChannelMasks) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_channel_mask_t channelMask = channelMaskFromString(stringVal);
+ EXPECT_EQ(stringVal != "AUDIO_CHANNEL_NONE", audio_channel_mask_is_valid(channelMask))
+ << "Validity of \"" << stringVal << "\" is not as expected";
+ }
+}
+
+TEST(TypeConverter, ParseInputOutputIndexChannelMask) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioChannelMask>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_channel_mask_t channelMask, channelMaskBack;
+ std::string stringValBack;
+ if (stringVal.find("_CHANNEL_IN_") != std::string::npos) {
+ EXPECT_TRUE(InputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as input channel mask)";
+ EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of input channel mask " << channelMask << " failed";
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ EXPECT_TRUE(InputChannelConverter::fromString(stringValBack, channelMaskBack))
+ << "Conversion of \"" << stringValBack << "\" failed (as input channel mask)";
+ EXPECT_EQ(channelMask, channelMaskBack);
+ } else if (stringVal.find("_CHANNEL_OUT_") != std::string::npos) {
+ EXPECT_TRUE(OutputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as output channel mask)";
+ EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of output channel mask " << channelMask << " failed";
+ EXPECT_TRUE(OutputChannelConverter::fromString(stringValBack, channelMaskBack))
+ << "Conversion of \"" << stringValBack << "\" failed (as output channel mask)";
+ EXPECT_EQ(channelMask, channelMaskBack);
+ } else if (stringVal.find("_CHANNEL_INDEX_") != std::string::npos) {
+ EXPECT_TRUE(ChannelIndexConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" failed (as indexed channel mask)";
+ EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+ << "Conversion of indexed channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ } else if (stringVal == "AUDIO_CHANNEL_NONE") {
+ EXPECT_FALSE(InputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as input channel mask)";
+ EXPECT_FALSE(OutputChannelConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as output channel mask)";
+ EXPECT_FALSE(ChannelIndexConverter::fromString(stringVal, channelMask))
+ << "Conversion of \"" << stringVal << "\" succeeded (as index channel mask)";
+ // None of the converters could parse this because 'NONE' isn't a valid channel mask.
+ channelMask = AUDIO_CHANNEL_NONE;
+ // However, they all must succeed in converting it back.
+ EXPECT_TRUE(InputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of input channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ EXPECT_TRUE(OutputChannelConverter::toString(channelMask, stringValBack))
+ << "Conversion of output channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ EXPECT_TRUE(ChannelIndexConverter::toString(channelMask, stringValBack))
+ << "Conversion of indexed channel mask " << channelMask << " failed";
+ EXPECT_EQ(stringVal, stringValBack);
+ }
+ }
+}
+
+TEST(TypeConverter, ParseContentTypes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioContentType>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_content_type_t contentType;
+ EXPECT_TRUE(AudioContentTypeConverter::fromString(stringVal, contentType))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(contentType));
+ }
+}
+
+TEST(TypeConverter, ParseDevices) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_devices_t device, deviceBack;
+ std::string stringValBack;
+ EXPECT_TRUE(DeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed";
+ if (stringVal != "AUDIO_DEVICE_NONE") {
+ EXPECT_TRUE(audio_is_input_device(device) || audio_is_output_device(device))
+ << "Device \"" << stringVal << "\" is neither an input nor an output device";
+ } else {
+ EXPECT_FALSE(audio_is_input_device(device));
+ EXPECT_FALSE(audio_is_output_device(device));
+ }
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ stringValBack = toString(device);
+ EXPECT_TRUE(DeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ }
+}
+
+TEST(TypeConverter, ParseInOutDevices) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioDevice>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_devices_t device, deviceBack;
+ std::string stringValBack;
+ if (stringVal.find("_DEVICE_IN_") != std::string::npos) {
+ EXPECT_TRUE(InputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed (as input device)";
+ // Due to aliased values, the result of 'toString' might not be the same
+ // as 'stringVal', thus we need to compare the results of parsing instead.
+ stringValBack = toString(device);
+ EXPECT_TRUE(InputDeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ } else if (stringVal.find("_DEVICE_OUT_") != std::string::npos) {
+ EXPECT_TRUE(OutputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" failed (as output device)";
+ stringValBack = toString(device);
+ EXPECT_TRUE(OutputDeviceConverter::fromString(stringValBack, deviceBack))
+ << "Conversion of \"" << stringValBack << "\" failed";
+ EXPECT_EQ(device, deviceBack);
+ } else if (stringVal == "AUDIO_DEVICE_NONE") {
+ EXPECT_FALSE(InputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" succeeded (as input device)";
+ EXPECT_FALSE(OutputDeviceConverter::fromString(stringVal, device))
+ << "Conversion of \"" << stringVal << "\" succeeded (as output device)";
+ EXPECT_EQ(stringVal, toString(device));
+ }
+ }
+}
+
+TEST(TypeConverter, ParseInOutFlags) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioInOutFlag>{}) {
+ const std::string stringVal = toString(enumVal);
+ if (stringVal.find("_INPUT_FLAG_") != std::string::npos) {
+ audio_input_flags_t flag;
+ EXPECT_TRUE(InputFlagConverter::fromString(stringVal, flag))
+ << "Conversion of \"" << stringVal << "\" failed (as input flag)";
+ EXPECT_EQ(stringVal, toString(flag));
+ } else {
+ audio_output_flags_t flag;
+ EXPECT_TRUE(OutputFlagConverter::fromString(stringVal, flag))
+ << "Conversion of \"" << stringVal << "\" failed (as output flag)";
+ EXPECT_EQ(stringVal, toString(flag));
+ }
+ }
+}
+
+TEST(TypeConverter, ParseFormats) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioFormat>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_format_t format;
+ EXPECT_TRUE(FormatConverter::fromString(stringVal, format))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_TRUE(audio_is_valid_format(format))
+ << "Converted format \"" << stringVal << "\" is invalid";
+ EXPECT_EQ(stringVal, toString(format));
+ }
+}
+
+TEST(TypeConverter, ParseGainModes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioGainMode>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_gain_mode_t gainMode;
+ EXPECT_TRUE(GainModeConverter::fromString(stringVal, gainMode))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(gainMode));
+ }
+}
+
+TEST(TypeConverter, ParseSources) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioSource>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_source_t source;
+ EXPECT_TRUE(SourceTypeConverter::fromString(stringVal, source))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(source != AUDIO_SOURCE_DEFAULT, audio_is_valid_audio_source(source))
+ << "Validity of \"" << stringVal << "\" is not as expected";
+ EXPECT_EQ(stringVal, toString(source));
+ }
+}
+
+TEST(TypeConverter, ParseStreamTypes) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioStreamType>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_stream_type_t streamType;
+ EXPECT_TRUE(StreamTypeConverter::fromString(stringVal, streamType))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(streamType));
+ }
+}
+
+TEST(TypeConverter, ParseUsages) {
+ for (const auto enumVal : xsdc_enum_range<xsd::AudioUsage>{}) {
+ const std::string stringVal = toString(enumVal);
+ audio_usage_t usage;
+ EXPECT_TRUE(UsageTypeConverter::fromString(stringVal, usage))
+ << "Conversion of \"" << stringVal << "\" failed";
+ EXPECT_EQ(stringVal, toString(usage));
+ }
+}
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 09b9145..8e0c69f 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -174,9 +174,7 @@
ALOGV("getting track %zu of %zu, meta=%s", i, n, meta->toString().c_str());
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strncasecmp(mime, "image/", 6)) {
+ if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "image/", 6)) {
int32_t isPrimary;
if ((index < 0 && meta->findInt32(
kKeyTrackIsDefault, &isPrimary) && isPrimary)
@@ -208,7 +206,10 @@
}
const char *mime;
- CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+ if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+ ALOGE("image track has no mime type");
+ return NULL;
+ }
ALOGV("extracting from %s track", mime);
if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
mime = MEDIA_MIMETYPE_VIDEO_HEVC;
@@ -299,9 +300,7 @@
}
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strncasecmp(mime, "video/", 6)) {
+ if (meta->findCString(kKeyMIMEType, &mime) && !strncasecmp(mime, "video/", 6)) {
break;
}
}
@@ -337,7 +336,10 @@
}
const char *mime;
- CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
+ if (!trackMeta->findCString(kKeyMIMEType, &mime)) {
+ ALOGE("video track has no mime type");
+ return NULL;
+ }
bool preferhw = property_get_bool(
"media.stagefright.thumbnail.prefer_hw_codecs", false);
@@ -531,7 +533,7 @@
int32_t audioBitrate = -1;
int32_t rotationAngle = -1;
int32_t imageCount = 0;
- int32_t imagePrimary = 0;
+ int32_t imagePrimary = -1;
int32_t imageWidth = -1;
int32_t imageHeight = -1;
int32_t imageRotation = -1;
@@ -574,29 +576,33 @@
mMetaData.add(METADATA_KEY_SAMPLERATE, String8(tmp));
}
} else if (!hasVideo && !strncasecmp("video/", mime, 6)) {
- hasVideo = true;
- videoMime = String8(mime);
-
- CHECK(trackMeta->findInt32(kKeyWidth, &videoWidth));
- CHECK(trackMeta->findInt32(kKeyHeight, &videoHeight));
if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
rotationAngle = 0;
}
if (!trackMeta->findInt32(kKeyFrameCount, &videoFrameCount)) {
videoFrameCount = 0;
}
-
- parseColorAspects(trackMeta);
+ if (trackMeta->findInt32(kKeyWidth, &videoWidth)
+ && trackMeta->findInt32(kKeyHeight, &videoHeight)) {
+ hasVideo = true;
+ videoMime = String8(mime);
+ parseColorAspects(trackMeta);
+ } else {
+ ALOGE("video track ignored due to missing dimensions");
+ }
} else if (!strncasecmp("image/", mime, 6)) {
int32_t isPrimary;
if (trackMeta->findInt32(
kKeyTrackIsDefault, &isPrimary) && isPrimary) {
- imagePrimary = imageCount;
- CHECK(trackMeta->findInt32(kKeyWidth, &imageWidth));
- CHECK(trackMeta->findInt32(kKeyHeight, &imageHeight));
if (!trackMeta->findInt32(kKeyRotation, &imageRotation)) {
imageRotation = 0;
}
+ if (trackMeta->findInt32(kKeyWidth, &imageWidth)
+ && trackMeta->findInt32(kKeyHeight, &imageHeight)) {
+ imagePrimary = imageCount;
+ } else {
+ ALOGE("primary image track ignored due to missing dimensions");
+ }
}
imageCount++;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
@@ -629,9 +635,11 @@
if (hasVideo) {
mMetaData.add(METADATA_KEY_HAS_VIDEO, String8("yes"));
+ CHECK(videoWidth >= 0);
sprintf(tmp, "%d", videoWidth);
mMetaData.add(METADATA_KEY_VIDEO_WIDTH, String8(tmp));
+ CHECK(videoHeight >= 0);
sprintf(tmp, "%d", videoHeight);
mMetaData.add(METADATA_KEY_VIDEO_HEIGHT, String8(tmp));
@@ -646,7 +654,8 @@
}
}
- if (imageCount > 0) {
+ // Only add image metadata if a valid primary image was found.
+ if (imageCount > 0 && imagePrimary >= 0) {
mMetaData.add(METADATA_KEY_HAS_IMAGE, String8("yes"));
sprintf(tmp, "%d", imageCount);
@@ -655,9 +664,11 @@
sprintf(tmp, "%d", imagePrimary);
mMetaData.add(METADATA_KEY_IMAGE_PRIMARY, String8(tmp));
+ CHECK(imageWidth >= 0);
sprintf(tmp, "%d", imageWidth);
mMetaData.add(METADATA_KEY_IMAGE_WIDTH, String8(tmp));
+ CHECK(imageHeight >= 0);
sprintf(tmp, "%d", imageHeight);
mMetaData.add(METADATA_KEY_IMAGE_HEIGHT, String8(tmp));
@@ -685,10 +696,9 @@
!strcasecmp(fileMIME, "video/x-matroska")) {
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(0);
const char *trackMIME;
- if (trackMeta != nullptr) {
- CHECK(trackMeta->findCString(kKeyMIMEType, &trackMIME));
- }
- if (!strncasecmp("audio/", trackMIME, 6)) {
+ if (trackMeta != nullptr
+ && trackMeta->findCString(kKeyMIMEType, &trackMIME)
+ && !strncasecmp("audio/", trackMIME, 6)) {
// The matroska file only contains a single audio track,
// rewrite its mime type.
mMetaData.add(
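
Editor's note: the StagefrightMetadataRetriever changes above consistently replace CHECK() aborts with "skip this track" fallbacks when the MIME type or dimensions are missing. A tiny self-contained sketch of that defensive-lookup shape, with a stand-in MetaDataLike map instead of the real MetaData class (all names here are illustrative only):

#include <strings.h>   // strncasecmp
#include <map>
#include <string>

// Stand-in for MetaData::findCString(); not the real API.
struct MetaDataLike {
    std::map<std::string, std::string> entries;
    bool findCString(const char *key, const char **out) const {
        auto it = entries.find(key);
        if (it == entries.end()) return false;
        *out = it->second.c_str();
        return true;
    }
};

// A missing key means "skip this track", never "abort the process".
bool isImageTrack(const MetaDataLike &meta) {
    const char *mime = nullptr;
    return meta.findCString("mime", &mime) && strncasecmp(mime, "image/", 6) == 0;
}

int main() {
    MetaDataLike image{{{"mime", "image/heic"}}};
    MetaDataLike broken{};  // no MIME type at all
    return isImageTrack(image) && !isImageTrack(broken) ? 0 : 1;
}
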
diff --git a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
index ac17ef3..5751631 100644
--- a/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
+++ b/media/libmediaplayerservice/tests/stagefrightRecorder/StagefrightRecorderTest.cpp
@@ -29,7 +29,7 @@
#include <MediaPlayerService.h>
#include <media/NdkMediaExtractor.h>
#include <media/stagefright/MediaCodec.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
#include "StagefrightRecorder.h"
diff --git a/media/libmediatranscoding/TranscoderWrapper.cpp b/media/libmediatranscoding/TranscoderWrapper.cpp
index 61e767c..fffbfe9 100644
--- a/media/libmediatranscoding/TranscoderWrapper.cpp
+++ b/media/libmediatranscoding/TranscoderWrapper.cpp
@@ -155,7 +155,7 @@
}
virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
- const std::shared_ptr<const Parcel>& pausedState
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState
__unused) override {
ALOGV("%s: session {%lld, %d}", __FUNCTION__, (long long)mClientId, mSessionId);
}
@@ -189,7 +189,7 @@
auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
if (it == mPausedStateMap.end()) {
mPausedStateMap.emplace(SessionKeyType(clientId, sessionId),
- std::shared_ptr<const Parcel>());
+ new ndk::ScopedAParcel());
}
callback->onResourceLost();
@@ -316,7 +316,7 @@
media_status_t TranscoderWrapper::setupTranscoder(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
const std::shared_ptr<ITranscodingClientCallback>& clientCb,
- const std::shared_ptr<const Parcel>& pausedState) {
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
if (clientCb == nullptr) {
ALOGE("client callback is null");
return AMEDIA_ERROR_INVALID_PARAMETER;
@@ -426,7 +426,7 @@
ALOGI("%s: pausing transcoder", __FUNCTION__);
- std::shared_ptr<const Parcel> pauseStates;
+ std::shared_ptr<ndk::ScopedAParcel> pauseStates;
media_status_t err = mTranscoder->pause(&pauseStates);
if (err != AMEDIA_OK) {
ALOGE("%s: failed to pause transcoder: %d", __FUNCTION__, err);
@@ -441,7 +441,7 @@
media_status_t TranscoderWrapper::handleResume(
ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
const std::shared_ptr<ITranscodingClientCallback>& clientCb) {
- std::shared_ptr<const Parcel> pausedState;
+ std::shared_ptr<ndk::ScopedAParcel> pausedState;
auto it = mPausedStateMap.find(SessionKeyType(clientId, sessionId));
if (it != mPausedStateMap.end()) {
pausedState = it->second;
diff --git a/media/libmediatranscoding/TranscodingClientManager.cpp b/media/libmediatranscoding/TranscodingClientManager.cpp
index b57baa5..ae1f7a5 100644
--- a/media/libmediatranscoding/TranscodingClientManager.cpp
+++ b/media/libmediatranscoding/TranscodingClientManager.cpp
@@ -31,7 +31,10 @@
static_assert(sizeof(ClientIdType) == sizeof(void*), "ClientIdType should be pointer-sized");
-static constexpr const char* MEDIA_PROVIDER_PKG_NAME = "com.google.android.providers.media.module";
+static constexpr const char* MEDIA_PROVIDER_PKG_NAMES[] = {
+ "com.android.providers.media.module",
+ "com.google.android.providers.media.module",
+};
using ::aidl::android::media::BnTranscodingClient;
using ::aidl::android::media::IMediaTranscodingService; // For service error codes
@@ -261,16 +264,16 @@
TranscodingClientManager::TranscodingClientManager(
const std::shared_ptr<ControllerClientInterface>& controller)
: mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)),
- mSessionController(controller),
- mMediaProviderUid(-1) {
+ mSessionController(controller) {
ALOGD("TranscodingClientManager started");
uid_t mpuid;
- if (TranscodingUidPolicy::getUidForPackage(String16(MEDIA_PROVIDER_PKG_NAME), mpuid) ==
- NO_ERROR) {
- ALOGI("Found MediaProvider uid: %d", mpuid);
- mMediaProviderUid = mpuid;
- } else {
- ALOGW("Couldn't get uid for MediaProvider.");
+ for (const char* pkgName : MEDIA_PROVIDER_PKG_NAMES) {
+ if (TranscodingUidPolicy::getUidForPackage(String16(pkgName), mpuid) == NO_ERROR) {
+ ALOGI("Found %s's uid: %d", pkgName, mpuid);
+ mMediaProviderUid.insert(mpuid);
+ } else {
+ ALOGW("Couldn't get uid for %s.", pkgName);
+ }
}
}
@@ -303,7 +306,7 @@
}
bool TranscodingClientManager::isTrustedCallingUid(uid_t uid) {
- if (uid > 0 && uid == mMediaProviderUid) {
+ if (uid > 0 && mMediaProviderUid.count(uid) > 0) {
return true;
}
diff --git a/media/libmediatranscoding/include/media/TranscoderWrapper.h b/media/libmediatranscoding/include/media/TranscoderWrapper.h
index 6bf6b56..9ec32d7 100644
--- a/media/libmediatranscoding/include/media/TranscoderWrapper.h
+++ b/media/libmediatranscoding/include/media/TranscoderWrapper.h
@@ -66,7 +66,7 @@
std::mutex mLock;
std::condition_variable mCondition;
std::list<Event> mQueue; // GUARDED_BY(mLock);
- std::map<SessionKeyType, std::shared_ptr<const Parcel>> mPausedStateMap;
+ std::map<SessionKeyType, std::shared_ptr<ndk::ScopedAParcel>> mPausedStateMap;
ClientIdType mCurrentClientId;
SessionIdType mCurrentSessionId;
@@ -82,10 +82,10 @@
media_status_t handleResume(ClientIdType clientId, SessionIdType sessionId,
const TranscodingRequestParcel& request,
const std::shared_ptr<ITranscodingClientCallback>& callback);
- media_status_t setupTranscoder(ClientIdType clientId, SessionIdType sessionId,
- const TranscodingRequestParcel& request,
- const std::shared_ptr<ITranscodingClientCallback>& callback,
- const std::shared_ptr<const Parcel>& pausedState = nullptr);
+ media_status_t setupTranscoder(
+ ClientIdType clientId, SessionIdType sessionId, const TranscodingRequestParcel& request,
+ const std::shared_ptr<ITranscodingClientCallback>& callback,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
void cleanup();
void reportError(ClientIdType clientId, SessionIdType sessionId, media_status_t err);
diff --git a/media/libmediatranscoding/include/media/TranscodingClientManager.h b/media/libmediatranscoding/include/media/TranscodingClientManager.h
index 5feeae9..451f993 100644
--- a/media/libmediatranscoding/include/media/TranscodingClientManager.h
+++ b/media/libmediatranscoding/include/media/TranscodingClientManager.h
@@ -109,7 +109,7 @@
::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
std::shared_ptr<ControllerClientInterface> mSessionController;
- uid_t mMediaProviderUid;
+ std::unordered_set<uid_t> mMediaProviderUid;
static std::atomic<ClientIdType> sCookieCounter;
static std::mutex sCookie2ClientLock;
diff --git a/media/libmediatranscoding/transcoder/Android.bp b/media/libmediatranscoding/transcoder/Android.bp
index 258ed9a..1896412 100644
--- a/media/libmediatranscoding/transcoder/Android.bp
+++ b/media/libmediatranscoding/transcoder/Android.bp
@@ -34,8 +34,7 @@
"libmediandk",
"libnativewindow",
"libutils",
- // TODO: Use libbinder_ndk
- "libbinder",
+ "libbinder_ndk",
],
export_include_dirs: [
diff --git a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
index cdb8368..d89b58f 100644
--- a/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/MediaTranscoder.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "MediaTranscoder"
#include <android-base/logging.h>
-#include <binder/Parcel.h>
#include <fcntl.h>
#include <media/MediaSampleReaderNDK.h>
#include <media/MediaSampleWriter.h>
@@ -160,7 +159,7 @@
std::shared_ptr<MediaTranscoder> MediaTranscoder::create(
const std::shared_ptr<CallbackInterface>& callbacks,
- const std::shared_ptr<const Parcel>& pausedState) {
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) {
if (pausedState != nullptr) {
LOG(INFO) << "Initializing from paused state.";
}
@@ -325,9 +324,9 @@
return AMEDIA_OK;
}
-media_status_t MediaTranscoder::pause(std::shared_ptr<const Parcel>* pausedState) {
+media_status_t MediaTranscoder::pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState) {
// TODO: write internal states to parcel.
- *pausedState = std::make_shared<Parcel>();
+ *pausedState = std::shared_ptr<::ndk::ScopedAParcel>(new ::ndk::ScopedAParcel());
return cancel();
}
diff --git a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
index f985a28..ede86cf 100644
--- a/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
+++ b/media/libmediatranscoding/transcoder/benchmark/MediaTranscoderBenchmark.cpp
@@ -55,7 +55,7 @@
int32_t progress __unused) override {}
virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
- const std::shared_ptr<const Parcel>& pausedState
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState
__unused) override {}
bool waitForTranscodingFinished() {
diff --git a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
index 9a367ca..555cfce 100644
--- a/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
+++ b/media/libmediatranscoding/transcoder/include/media/MediaTranscoder.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_MEDIA_TRANSCODER_H
#define ANDROID_MEDIA_TRANSCODER_H
+#include <android/binder_auto_utils.h>
#include <media/MediaSampleWriter.h>
#include <media/MediaTrackTranscoderCallback.h>
#include <media/NdkMediaError.h>
@@ -31,7 +32,6 @@
namespace android {
class MediaSampleReader;
-class Parcel;
class MediaTranscoder : public std::enable_shared_from_this<MediaTranscoder>,
public MediaTrackTranscoderCallback,
@@ -56,8 +56,9 @@
* 2) Creating a new MediaTranscoder instance with the paused state and then calling
* resume.
*/
- virtual void onCodecResourceLost(const MediaTranscoder* transcoder,
- const std::shared_ptr<const Parcel>& pausedState) = 0;
+ virtual void onCodecResourceLost(
+ const MediaTranscoder* transcoder,
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState) = 0;
virtual ~CallbackInterface() = default;
};
@@ -69,7 +70,7 @@
*/
static std::shared_ptr<MediaTranscoder> create(
const std::shared_ptr<CallbackInterface>& callbacks,
- const std::shared_ptr<const Parcel>& pausedState = nullptr);
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState = nullptr);
/** Configures source from path fd. */
media_status_t configureSource(int fd);
@@ -102,12 +103,8 @@
* release the transcoder instance, clear the paused state and delete the partial destination
* file. The caller can optionally call cancel to let the transcoder clean up the partial
* destination file.
- *
- * TODO: use NDK AParcel instead
- * libbinder shouldn't be used by mainline modules. When transcoding goes mainline
- * it needs to be replaced by stable AParcel.
*/
- media_status_t pause(std::shared_ptr<const Parcel>* pausedState);
+ media_status_t pause(std::shared_ptr<ndk::ScopedAParcel>* pausedState);
/** Resumes a paused transcoding. */
media_status_t resume();
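
Editor's note: with the move from libbinder's Parcel to ndk::ScopedAParcel, the client-visible pause/resume flow looks roughly like the sketch below. It assumes the include paths used by this change and an existing CallbackInterface implementation; reconfiguring the source, destination, and tracks on the new instance (if required) is elided.

#include <memory>

#include <android/binder_auto_utils.h>
#include <media/MediaTranscoder.h>
#include <media/NdkMediaError.h>

using android::MediaTranscoder;

media_status_t pauseAndResume(const std::shared_ptr<MediaTranscoder> &transcoder,
                              const std::shared_ptr<MediaTranscoder::CallbackInterface> &callbacks) {
    // pause() now hands back an NDK parcel instead of a libbinder Parcel.
    std::shared_ptr<ndk::ScopedAParcel> pausedState;
    media_status_t err = transcoder->pause(&pausedState);
    if (err != AMEDIA_OK) return err;

    // Per the header comment above, resuming means creating a new instance
    // from the paused state and calling resume() on it.
    std::shared_ptr<MediaTranscoder> resumed = MediaTranscoder::create(callbacks, pausedState);
    if (resumed == nullptr) return AMEDIA_ERROR_UNKNOWN;
    return resumed->resume();
}
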
diff --git a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
index 7a968eb..1bf2d8c 100644
--- a/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
+++ b/media/libmediatranscoding/transcoder/tests/MediaTranscoderTests.cpp
@@ -82,7 +82,7 @@
}
virtual void onCodecResourceLost(const MediaTranscoder* transcoder __unused,
- const std::shared_ptr<const Parcel>& pausedState
+ const std::shared_ptr<ndk::ScopedAParcel>& pausedState
__unused) override {}
void waitForTranscodingFinished() {
diff --git a/media/libshmem/Android.bp b/media/libshmem/Android.bp
index fae98ed..c8d2284 100644
--- a/media/libshmem/Android.bp
+++ b/media/libshmem/Android.bp
@@ -41,6 +41,7 @@
srcs: ["ShmemTest.cpp"],
shared_libs: [
"libbinder",
+ "libcutils",
"libshmemcompat",
"libshmemutil",
"libutils",
diff --git a/media/libshmem/ShmemCompat.cpp b/media/libshmem/ShmemCompat.cpp
index 5dd83f4..246cb24 100644
--- a/media/libshmem/ShmemCompat.cpp
+++ b/media/libshmem/ShmemCompat.cpp
@@ -24,15 +24,12 @@
bool convertSharedFileRegionToIMemory(const SharedFileRegion& shmem,
sp<IMemory>* result) {
+ assert(result != nullptr);
+
if (!validateSharedFileRegion(shmem)) {
return false;
}
- if (shmem.fd.get() < 0) {
- *result = nullptr;
- return true;
- }
-
// Heap offset and size must be page aligned.
const size_t pageSize = getpagesize();
const size_t pageMask = ~(pageSize - 1);
@@ -52,8 +49,10 @@
return false;
}
+ uint32_t flags = !shmem.writeable ? IMemoryHeap::READ_ONLY : 0;
+
const sp<MemoryHeapBase> heap =
- new MemoryHeapBase(shmem.fd.get(), heapSize, 0, heapStartOffset);
+ new MemoryHeapBase(shmem.fd.get(), heapSize, flags, heapStartOffset);
*result = sp<MemoryBase>::make(heap,
shmem.offset - heapStartOffset,
shmem.size);
@@ -62,16 +61,19 @@
bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
SharedFileRegion* result) {
+ assert(mem != nullptr);
+ assert(result != nullptr);
+
*result = SharedFileRegion();
- if (mem == nullptr) {
- return true;
- }
ssize_t offset;
size_t size;
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- if (heap != nullptr) {
+ if (size > 0) {
+ if (heap == nullptr) {
+ return false;
+ }
// Make sure the offset and size do not overflow from int64 boundaries.
if (size > std::numeric_limits<int64_t>::max() ||
offset > std::numeric_limits<int64_t>::max() ||
@@ -89,9 +91,34 @@
result->fd.reset(base::unique_fd(fd));
result->size = size;
result->offset = heap->getOffset() + offset;
+ result->writeable = (heap->getFlags() & IMemoryHeap::READ_ONLY) == 0;
+ }
+ return true;
+}
+
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+ sp<IMemory>* result) {
+ assert(result != nullptr);
+
+ if (!shmem.has_value()) {
+ result->clear();
+ return true;
}
- return true;
+ return convertSharedFileRegionToIMemory(shmem.value(), result);
+}
+
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ std::optional<SharedFileRegion>* result) {
+ assert(result != nullptr);
+
+ if (mem == nullptr) {
+ result->reset();
+ return true;
+ }
+
+ result->emplace();
+ return convertIMemoryToSharedFileRegion(mem, &result->value());
}
} // namespace media
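
Editor's note: since SharedFileRegion no longer encodes "null" as a negative FD, optionality moves to std::optional and the new convertNullable* helpers. A short usage sketch (round trip of a possibly-null IMemory; error handling reduced to a bool):

#include <optional>

#include <binder/IMemory.h>
#include <media/ShmemCompat.h>
#include <utils/StrongPointer.h>

using android::IMemory;
using android::sp;
using android::media::SharedFileRegion;

// A null input converts to an empty optional and back to a null sp<>.
bool roundTrip(const sp<IMemory> &in, sp<IMemory> *out) {
    std::optional<SharedFileRegion> shmem;
    if (!android::media::convertNullableIMemoryToSharedFileRegion(in, &shmem)) {
        return false;
    }
    return android::media::convertNullableSharedFileRegionToIMemory(shmem, out);
}
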
diff --git a/media/libshmem/ShmemTest.cpp b/media/libshmem/ShmemTest.cpp
index 4f11b51..874f34c 100644
--- a/media/libshmem/ShmemTest.cpp
+++ b/media/libshmem/ShmemTest.cpp
@@ -17,6 +17,7 @@
#include "binder/MemoryBase.h"
#include "binder/MemoryHeapBase.h"
+#include "cutils/ashmem.h"
#include "media/ShmemCompat.h"
#include "media/ShmemUtil.h"
@@ -24,18 +25,30 @@
namespace media {
namespace {
-// Creates a SharedFileRegion instance with a null FD.
+// Creates a SharedFileRegion instance.
SharedFileRegion makeSharedFileRegion(int64_t offset, int64_t size) {
SharedFileRegion shmem;
shmem.offset = offset;
shmem.size = size;
+ int fd = ashmem_create_region("", size + offset);
+ assert(fd >= 0);
+ shmem.fd = os::ParcelFileDescriptor(base::unique_fd(fd));
+ return shmem;
+}
+
+// Creates a SharedFileRegion instance with an invalid FD.
+SharedFileRegion makeInvalidSharedFileRegion(int64_t offset, int64_t size) {
+ SharedFileRegion shmem;
+ shmem.offset = offset;
+ shmem.size = size;
return shmem;
}
-sp<IMemory> makeIMemory(const std::vector<uint8_t>& content) {
+sp<IMemory> makeIMemory(const std::vector<uint8_t>& content, bool writeable = true) {
constexpr size_t kOffset = 19;
- sp<MemoryHeapBase> heap = new MemoryHeapBase(content.size());
+ sp<MemoryHeapBase> heap = new MemoryHeapBase(content.size(),
+ !writeable ? IMemoryHeap::READ_ONLY : 0);
sp<IMemory> result = sp<MemoryBase>::make(heap, kOffset, content.size());
memcpy(result->unsecurePointer(), content.data(), content.size());
return result;
@@ -46,9 +59,7 @@
EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(1, 2)));
EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(-1, 2)));
EXPECT_FALSE(validateSharedFileRegion(makeSharedFileRegion(2, -1)));
- EXPECT_TRUE(validateSharedFileRegion(makeSharedFileRegion(
- std::numeric_limits<int64_t>::max(),
- std::numeric_limits<int64_t>::max())));
+ EXPECT_FALSE(validateSharedFileRegion(makeInvalidSharedFileRegion(1, 2)));
}
TEST(ShmemTest, Conversion) {
@@ -59,9 +70,31 @@
ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
ASSERT_EQ(3, shmem.size);
ASSERT_GE(shmem.fd.get(), 0);
+ ASSERT_TRUE(shmem.writeable);
ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
}
ASSERT_EQ(3, reconstructed->size());
+ ASSERT_EQ(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
+ const uint8_t* p =
+ reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
+ EXPECT_EQ(6, p[0]);
+ EXPECT_EQ(5, p[1]);
+ EXPECT_EQ(3, p[2]);
+}
+
+TEST(ShmemTest, ConversionReadOnly) {
+ sp<IMemory> reconstructed;
+ {
+ SharedFileRegion shmem;
+ sp<IMemory> imem = makeIMemory({6, 5, 3}, false);
+ ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
+ ASSERT_EQ(3, shmem.size);
+ ASSERT_GE(shmem.fd.get(), 0);
+ ASSERT_FALSE(shmem.writeable);
+ ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+ }
+ ASSERT_EQ(3, reconstructed->size());
+ ASSERT_NE(reconstructed->getMemory()->getFlags() & IMemoryHeap::READ_ONLY, 0);
const uint8_t* p =
reinterpret_cast<const uint8_t*>(reconstructed->unsecurePointer());
EXPECT_EQ(6, p[0]);
@@ -72,12 +105,11 @@
TEST(ShmemTest, NullConversion) {
sp<IMemory> reconstructed;
{
- SharedFileRegion shmem;
+ std::optional<SharedFileRegion> shmem;
sp<IMemory> imem;
- ASSERT_TRUE(convertIMemoryToSharedFileRegion(imem, &shmem));
- ASSERT_EQ(0, shmem.size);
- ASSERT_LT(shmem.fd.get(), 0);
- ASSERT_TRUE(convertSharedFileRegionToIMemory(shmem, &reconstructed));
+ ASSERT_TRUE(convertNullableIMemoryToSharedFileRegion(imem, &shmem));
+ ASSERT_FALSE(shmem.has_value());
+ ASSERT_TRUE(convertNullableSharedFileRegionToIMemory(shmem, &reconstructed));
}
ASSERT_EQ(nullptr, reconstructed);
}
diff --git a/media/libshmem/ShmemUtil.cpp b/media/libshmem/ShmemUtil.cpp
index a6d047f..e075346 100644
--- a/media/libshmem/ShmemUtil.cpp
+++ b/media/libshmem/ShmemUtil.cpp
@@ -19,6 +19,11 @@
namespace media {
bool validateSharedFileRegion(const SharedFileRegion& shmem) {
+ // FD must be valid.
+ if (shmem.fd.get() < 0) {
+ return false;
+ }
+
// Size and offset must be non-negative.
if (shmem.size < 0 || shmem.offset < 0) {
return false;
diff --git a/media/libshmem/aidl/android/media/SharedFileRegion.aidl b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
index c99ad95..199b647 100644
--- a/media/libshmem/aidl/android/media/SharedFileRegion.aidl
+++ b/media/libshmem/aidl/android/media/SharedFileRegion.aidl
@@ -20,16 +20,20 @@
* A shared file region.
*
* This type contains the required information to share a region of a file between processes over
- * AIDL. An invalid (null) region may be represented using a negative file descriptor.
+ * AIDL.
+ * An instance of this type represents a valid FD. For representing a null SharedFileRegion, use a
+ * @nullable SharedFileRegion.
* Primarily, this is intended for shared memory blocks.
*
* @hide
*/
parcelable SharedFileRegion {
- /** File descriptor of the region. */
+ /** File descriptor of the region. Must be valid. */
ParcelFileDescriptor fd;
/** Offset, in bytes within the file of the start of the region. Must be non-negative. */
long offset;
/** Size, in bytes of the memory region. Must be non-negative. */
long size;
+ /** Whether the region is writeable. */
+ boolean writeable;
}
diff --git a/media/libshmem/include/media/ShmemCompat.h b/media/libshmem/include/media/ShmemCompat.h
index 3bf7f67..ba59f25 100644
--- a/media/libshmem/include/media/ShmemCompat.h
+++ b/media/libshmem/include/media/ShmemCompat.h
@@ -19,6 +19,8 @@
// This module contains utilities for interfacing between legacy code that is using IMemory and new
// code that is using android.os.SharedFileRegion.
+#include <optional>
+
#include "android/media/SharedFileRegion.h"
#include "binder/IMemory.h"
#include "utils/StrongPointer.h"
@@ -29,8 +31,7 @@
/**
* Converts a SharedFileRegion parcelable to an IMemory instance.
* @param shmem The SharedFileRegion instance.
- * @param result The resulting IMemory instance, or null of the SharedFileRegion is null (has a
- * negative FD).
+ * @param result The resulting IMemory instance. May not be null.
* @return true if the conversion is successful (should always succeed under normal circumstances,
* failure usually means corrupt data).
*/
@@ -38,8 +39,19 @@
sp<IMemory>* result);
/**
+ * Converts a nullable SharedFileRegion parcelable to an IMemory instance.
+ * @param shmem The SharedFileRegion instance.
+ * @param result The resulting IMemory instance. May not be null. The pointee is set to null
+ * if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertNullableSharedFileRegionToIMemory(const std::optional<SharedFileRegion>& shmem,
+ sp<IMemory>* result);
+
+/**
* Converts an IMemory instance to SharedFileRegion.
- * @param mem The IMemory instance. May be null.
+ * @param mem The IMemory instance. May not be null.
* @param result The resulting SharedFileRegion instance.
* @return true if the conversion is successful (should always succeed under normal circumstances,
* failure usually means corrupt data).
@@ -47,5 +59,16 @@
bool convertIMemoryToSharedFileRegion(const sp<IMemory>& mem,
SharedFileRegion* result);
+/**
+ * Converts a nullable IMemory instance to a nullable SharedFileRegion.
+ * @param mem The IMemory instance. May be null.
+ * @param result The resulting SharedFileRegion instance. May not be null. Set to an empty
+ * optional if the input is null.
+ * @return true if the conversion is successful (should always succeed under normal circumstances,
+ * failure usually means corrupt data).
+ */
+bool convertNullableIMemoryToSharedFileRegion(const sp<IMemory>& mem,
+ std::optional<SharedFileRegion>* result);
+
} // namespace media
} // namespace android
diff --git a/media/libshmem/include/media/ShmemUtil.h b/media/libshmem/include/media/ShmemUtil.h
index 563cb71..3a7a5a5 100644
--- a/media/libshmem/include/media/ShmemUtil.h
+++ b/media/libshmem/include/media/ShmemUtil.h
@@ -25,7 +25,6 @@
/**
* Checks whether a SharedFileRegion instance is valid (all the fields have sane values).
- * A null SharedFileRegion (having a negative FD) is considered valid.
*/
bool validateSharedFileRegion(const SharedFileRegion& shmem);
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
index 0ba4944..dbaf5d1 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/idct_vca.cpp
@@ -37,6 +37,7 @@
return ;
}
+__attribute__((no_sanitize("signed-integer-overflow")))
void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
{
/* shortcut */
@@ -156,6 +157,7 @@
return ;
}
+__attribute__((no_sanitize("signed-integer-overflow")))
void idctcol2(int16 *blk)
{
int32 x0, x1, x3, x5, x7;//, x8;
@@ -256,6 +258,7 @@
return ;
}
+__attribute__((no_sanitize("signed-integer-overflow")))
void idctcol3(int16 *blk)
{
int32 x0, x1, x2, x3, x4, x5, x6, x7, x8;
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index d0e0cc7..755d6e6 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -54,6 +54,7 @@
cc_library_shared {
name: "libmediandk",
+ llndk_stubs: "libmediandk.llndk",
srcs: [
"NdkJavaVMHelper.cpp",
@@ -134,7 +135,7 @@
}
llndk_library {
- name: "libmediandk",
+ name: "libmediandk.llndk",
symbol_file: "libmediandk.map.txt",
export_include_dirs: [
"include",
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 12f6eba..261af5a 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -35,6 +35,9 @@
],
shared_libs: [
+ "audioflinger-aidl-unstable-cpp",
+ "audioclient-types-aidl-unstable-cpp",
+ "libaudioclient_aidl_conversion",
"libaudiofoundation",
"libaudiohal",
"libaudioprocessing",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index eae9437..e589eb9 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -22,6 +22,13 @@
// Define AUDIO_ARRAYS_STATIC_CHECK to check all audio arrays are correct
#define AUDIO_ARRAYS_STATIC_CHECK 1
+#define VALUE_OR_FATAL(result) \
+ ({ auto _tmp = (result); \
+ LOG_ALWAYS_FATAL_IF(!_tmp.ok(), \
+ "Failed result (%d)", \
+ _tmp.error()); \
+ _tmp.value(); })
+
#include "Configuration.h"
#include <dirent.h>
#include <math.h>
@@ -68,6 +75,7 @@
#include <powermanager/PowerManager.h>
#include <media/IMediaLogService.h>
+#include <media/AidlConversion.h>
#include <media/nbaio/Pipe.h>
#include <media/nbaio/PipeReader.h>
#include <mediautils/BatteryNotifier.h>
@@ -1774,7 +1782,7 @@
return BAD_VALUE;
}
-void AudioFlinger::registerClient(const sp<IAudioFlingerClient>& client)
+void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
{
Mutex::Autolock _l(mLock);
if (client == 0) {
@@ -1849,13 +1857,18 @@
void AudioFlinger::ioConfigChanged(audio_io_config_event event,
const sp<AudioIoDescriptor>& ioDesc,
- pid_t pid)
-{
+ pid_t pid) {
+ media::AudioIoDescriptor descAidl = VALUE_OR_FATAL(
+ legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
+ media::AudioIoConfigEvent eventAidl = VALUE_OR_FATAL(
+ legacy2aidl_audio_io_config_event_AudioIoConfigEvent(event));
+
Mutex::Autolock _l(mClientLock);
size_t size = mNotificationClients.size();
for (size_t i = 0; i < size; i++) {
if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
- mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(event, ioDesc);
+ mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
+ descAidl);
}
}
}
@@ -1929,7 +1942,7 @@
// ----------------------------------------------------------------------------
AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
- const sp<IAudioFlingerClient>& client,
+ const sp<media::IAudioFlingerClient>& client,
pid_t pid,
uid_t uid)
: mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
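
Editor's note: VALUE_OR_FATAL is a GNU statement expression that unwraps the result-or-error values returned by the legacy2aidl_* conversion helpers and aborts on failure. A self-contained sketch of the same shape, with a stand-in ResultOrError type and fprintf/abort standing in for LOG_ALWAYS_FATAL_IF (compiles with GCC/Clang, which support statement expressions):

#include <cstdio>
#include <cstdlib>
#include <optional>

template <typename T>
struct ResultOrError {        // stand-in for the real conversion-result type
    std::optional<T> val;
    int err = 0;
    bool ok() const { return val.has_value(); }
    int error() const { return err; }
    T value() const { return *val; }
};

#define VALUE_OR_FATAL(result)                                         \
    ({ auto _tmp = (result);                                           \
       if (!_tmp.ok()) {                                               \
           std::fprintf(stderr, "Failed result (%d)\n", _tmp.error()); \
           std::abort();                                               \
       }                                                               \
       _tmp.value(); })

ResultOrError<int> convert(bool good) {
    if (good) return {42, 0};
    return {std::nullopt, -22};
}

int main() {
    int value = VALUE_OR_FATAL(convert(true));  // aborts if convert() fails
    std::printf("converted value = %d\n", value);
    return 0;
}
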
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 14a4df7..65d672a 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,16 +33,16 @@
#include <sys/types.h>
#include <limits.h>
+#include <android/media/IAudioFlingerClient.h>
#include <android/media/IAudioTrackCallback.h>
#include <android/os/BnExternalVibrationController.h>
-#include <android-base/macros.h>
+#include <android-base/macros.h>
#include <cutils/atomic.h>
#include <cutils/compiler.h>
-#include <cutils/properties.h>
+#include <cutils/properties.h>
#include <media/IAudioFlinger.h>
-#include <media/IAudioFlingerClient.h>
#include <media/IAudioTrack.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
@@ -177,7 +177,7 @@
virtual status_t setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs);
virtual String8 getParameters(audio_io_handle_t ioHandle, const String8& keys) const;
- virtual void registerClient(const sp<IAudioFlingerClient>& client);
+ virtual void registerClient(const sp<media::IAudioFlingerClient>& client);
virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask) const;
@@ -490,12 +490,12 @@
class NotificationClient : public IBinder::DeathRecipient {
public:
NotificationClient(const sp<AudioFlinger>& audioFlinger,
- const sp<IAudioFlingerClient>& client,
+ const sp<media::IAudioFlingerClient>& client,
pid_t pid,
uid_t uid);
virtual ~NotificationClient();
- sp<IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
+ sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
pid_t getPid() const { return mPid; }
uid_t getUid() const { return mUid; }
@@ -508,7 +508,7 @@
const sp<AudioFlinger> mAudioFlinger;
const pid_t mPid;
const uid_t mUid;
- const sp<IAudioFlingerClient> mAudioFlingerClient;
+ const sp<media::IAudioFlingerClient> mAudioFlingerClient;
};
// --- MediaLogNotifier ---
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 46969ef..c1c3c44 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -1914,9 +1914,8 @@
: AUDIO_DEVICE_NONE));
}
- // ++ operator does not compile
- for (audio_stream_type_t stream = AUDIO_STREAM_MIN; stream < AUDIO_STREAM_FOR_POLICY_CNT;
- stream = (audio_stream_type_t) (stream + 1)) {
+ for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+ const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
mStreamTypes[stream].volume = 0.0f;
mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
}
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 395bc70..cf1f64c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -33,6 +33,15 @@
namespace android {
+// This class gathers together various bits of AudioPolicyManager
+// configuration, which are usually filled out as a result of parsing
+// the audio_policy_configuration.xml file.
+//
+// Note that AudioPolicyConfig doesn't own some of the data;
+// it simply proxies access to fields of the AudioPolicyManager
+// class. Be careful about the fields that are references,
+// e.g. 'mOutputDevices'. This also means that it's impossible
+// to implement "deep copying" of this class without re-designing it.
class AudioPolicyConfig
{
public:
@@ -40,14 +49,24 @@
DeviceVector &outputDevices,
DeviceVector &inputDevices,
sp<DeviceDescriptor> &defaultOutputDevice)
- : mEngineLibraryNameSuffix(kDefaultEngineLibraryNameSuffix),
- mHwModules(hwModules),
+ : mHwModules(hwModules),
mOutputDevices(outputDevices),
mInputDevices(inputDevices),
- mDefaultOutputDevice(defaultOutputDevice),
- mIsSpeakerDrcEnabled(false),
- mIsCallScreenModeSupported(false)
- {}
+ mDefaultOutputDevice(defaultOutputDevice) {
+ clear();
+ }
+
+ void clear() {
+ mSource = {};
+ mEngineLibraryNameSuffix = kDefaultEngineLibraryNameSuffix;
+ mHwModules.clear();
+ mOutputDevices.clear();
+ mInputDevices.clear();
+ mDefaultOutputDevice.clear();
+ mIsSpeakerDrcEnabled = false;
+ mIsCallScreenModeSupported = false;
+ mSurroundFormats.clear();
+ }
const std::string& getSource() const {
return mSource;
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index bf1a0f7..ae92b40 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -17,7 +17,7 @@
#define LOG_TAG "APM::IOProfile"
//#define LOG_NDEBUG 0
-#include <system/audio-base.h>
+#include <system/audio.h>
#include "IOProfile.h"
#include "HwModule.h"
#include "TypeConverter.h"
@@ -112,12 +112,11 @@
dst->append(portStr.c_str());
dst->appendFormat(" - flags: 0x%04x", getFlags());
- std::string flagsLiteral;
- if (getRole() == AUDIO_PORT_ROLE_SINK) {
- InputFlagConverter::maskToString(getFlags(), flagsLiteral);
- } else if (getRole() == AUDIO_PORT_ROLE_SOURCE) {
- OutputFlagConverter::maskToString(getFlags(), flagsLiteral);
- }
+ std::string flagsLiteral =
+ getRole() == AUDIO_PORT_ROLE_SINK ?
+ toString(static_cast<audio_input_flags_t>(getFlags())) :
+ getRole() == AUDIO_PORT_ROLE_SOURCE ?
+ toString(static_cast<audio_output_flags_t>(getFlags())) : "";
if (!flagsLiteral.empty()) {
dst->appendFormat(" (%s)", flagsLiteral.c_str());
}
diff --git a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
index 889f031..0cc3a68 100644
--- a/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/Serializer.cpp
@@ -254,9 +254,8 @@
constexpr void (*xmlDeleter)(T* t);
template <>
constexpr auto xmlDeleter<xmlDoc> = xmlFreeDoc;
-// http://b/111067277 - Add back constexpr when we switch to C++17.
template <>
-auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
+constexpr auto xmlDeleter<xmlChar> = [](xmlChar *s) { xmlFree(s); };
/** @return a unique_ptr with the correct deleter for the libxml2 object. */
template <class T>
@@ -337,7 +336,7 @@
std::string mode = getXmlAttribute(cur, Attributes::mode);
if (!mode.empty()) {
- gain->setMode(static_cast<audio_gain_mode_t>(GainModeConverter::maskFromString(mode)));
+ gain->setMode(GainModeConverter::maskFromString(mode, " "));
}
std::string channelsLiteral = getXmlAttribute(cur, Attributes::channelMask);
@@ -501,7 +500,7 @@
AUDIO_PORT_ROLE_SOURCE : AUDIO_PORT_ROLE_SINK;
audio_devices_t type = AUDIO_DEVICE_NONE;
- if (!deviceFromString(typeName, type) ||
+ if (!DeviceConverter::fromString(typeName, type) ||
(!audio_is_input_device(type) && portRole == AUDIO_PORT_ROLE_SOURCE) ||
(!audio_is_output_devices(type) && portRole == AUDIO_PORT_ROLE_SINK)) {
ALOGW("%s: bad type %08x", __func__, type);
@@ -804,7 +803,9 @@
status_t deserializeAudioPolicyFile(const char *fileName, AudioPolicyConfig *config)
{
PolicySerializer serializer;
- return serializer.deserialize(fileName, config);
+ status_t status = serializer.deserialize(fileName, config);
+ if (status != OK) config->clear();
+ return status;
}
} // namespace android
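deserializeAudioPolicyFile() now guarantees that a failed parse leaves the config empty instead of half-populated. A minimal sketch of that contract, with a hypothetical Config type and parse routine standing in for the real serializer:

    #include <string>

    struct Config {
        std::string source;
        int moduleCount = 0;
        void clear() { source.clear(); moduleCount = 0; }
    };

    enum class Status { Ok, Error };

    // Pretend parser: may fill *config partially before failing.
    static Status parseInto(const char* /*path*/, Config* config) {
        config->moduleCount = 1;   // partial result...
        return Status::Error;      // ...then the file turns out to be malformed
    }

    static Status loadConfig(const char* path, Config* config) {
        Status status = parseInto(path, config);
        if (status != Status::Ok) config->clear();  // never hand back a dirty object
        return status;
    }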
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 1875c10..8c7fb97 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -19,7 +19,6 @@
#include "EngineBase.h"
#include "EngineDefaultConfig.h"
-#include "../include/EngineBase.h"
#include <TypeConverter.h>
namespace android {
diff --git a/services/audiopolicy/engine/config/TEST_MAPPING b/services/audiopolicy/engine/config/TEST_MAPPING
new file mode 100644
index 0000000..06ce111
--- /dev/null
+++ b/services/audiopolicy/engine/config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "audiopolicy_engineconfig_tests"
+ }
+ ]
+}
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 5d22c24..c565926 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -111,6 +111,8 @@
*/
ParsingResult parse(const char* path = DEFAULT_PATH);
android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups);
+// Exposed for testing.
+android::status_t parseLegacyVolumeFile(const char* path, VolumeGroups &volumeGroups);
} // namespace engineConfig
} // namespace android
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index daf6418..7cfef5b 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -589,6 +589,7 @@
}
}
}
+ VolumeGroups tempVolumeGroups = volumeGroups;
for (const auto &volumeMapIter : legacyVolumeMap) {
// In order to let AudioService setting the min and max (compatibility), set Min and Max
// to -1 except for private streams
@@ -599,8 +600,10 @@
}
int indexMin = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 0 : -1;
int indexMax = streamType >= AUDIO_STREAM_PUBLIC_CNT ? 100 : -1;
- volumeGroups.push_back({ volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
+ tempVolumeGroups.push_back(
+ { volumeMapIter.first, indexMin, indexMax, volumeMapIter.second });
}
+ std::swap(tempVolumeGroups, volumeGroups);
return NO_ERROR;
}
@@ -695,35 +698,14 @@
return deserializeLegacyVolumeCollection(doc, cur, volumeGroups, nbSkippedElements);
}
-static const int gApmXmlConfigFilePathMaxLength = 128;
-
-static constexpr const char *apmXmlConfigFileName = "audio_policy_configuration.xml";
-static constexpr const char *apmA2dpOffloadDisabledXmlConfigFileName =
- "audio_policy_configuration_a2dp_offload_disabled.xml";
-
android::status_t parseLegacyVolumes(VolumeGroups &volumeGroups) {
- char audioPolicyXmlConfigFile[gApmXmlConfigFilePathMaxLength];
- std::vector<const char *> fileNames;
- status_t ret;
-
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- fileNames.push_back(apmA2dpOffloadDisabledXmlConfigFileName);
+ if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+ !audioPolicyXmlConfigFile.empty()) {
+ return parseLegacyVolumeFile(audioPolicyXmlConfigFile.c_str(), volumeGroups);
+ } else {
+ ALOGE("No readable audio policy config file found");
+ return BAD_VALUE;
}
- fileNames.push_back(apmXmlConfigFileName);
-
- for (const char* fileName : fileNames) {
- for (const auto& path : audio_get_configuration_paths()) {
- snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
- "%s/%s", path.c_str(), fileName);
- ret = parseLegacyVolumeFile(audioPolicyXmlConfigFile, volumeGroups);
- if (ret == NO_ERROR) {
- return ret;
- }
- }
- }
- return BAD_VALUE;
}
} // namespace engineConfig
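The legacy volume-group loading above appends the converted entries to a temporary copy and swaps the copy back into the caller's collection only when the whole conversion went through, so any earlier failure leaves the output untouched. A small sketch of the same copy-and-swap idiom with a hypothetical Group type:

    #include <string>
    #include <utility>
    #include <vector>

    struct Group { std::string name; int indexMin; int indexMax; };

    static bool appendGroups(std::vector<Group>& groups,
                             const std::vector<Group>& additions) {
        std::vector<Group> temp = groups;      // work on a copy
        for (const auto& g : additions) {
            if (g.name.empty()) {
                return false;                  // bail out, 'groups' is untouched
            }
            temp.push_back(g);
        }
        std::swap(temp, groups);               // commit only on full success
        return true;
    }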
diff --git a/services/audiopolicy/engine/config/tests/Android.bp b/services/audiopolicy/engine/config/tests/Android.bp
new file mode 100644
index 0000000..6b0774f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/Android.bp
@@ -0,0 +1,25 @@
+cc_test {
+ name: "audiopolicy_engineconfig_tests",
+
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libmedia_helper",
+ "libutils",
+ "libxml2",
+ ],
+ static_libs: [
+ "libaudiopolicyengine_config",
+ ],
+
+ srcs: ["engineconfig_tests.cpp"],
+
+ data: [":audiopolicy_engineconfig_files"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ test_suites: ["device-tests"],
+}
diff --git a/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
new file mode 100644
index 0000000..f61e02f
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/engineconfig_tests.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gtest/gtest.h>
+
+#define LOG_TAG "APM_Test"
+#include <android-base/file.h>
+#include <log/log.h>
+
+#include "EngineConfig.h"
+
+using namespace android;
+
+TEST(EngineConfigTestInit, LegacyVolumeGroupsLoadingIsTransactional) {
+ engineConfig::VolumeGroups groups;
+ ASSERT_TRUE(groups.empty());
+ status_t status = engineConfig::parseLegacyVolumeFile(
+ (base::GetExecutableDirectory() + "/test_invalid_apm_volume_tables.xml").c_str(),
+ groups);
+ ASSERT_NE(NO_ERROR, status);
+ EXPECT_TRUE(groups.empty());
+ status = engineConfig::parseLegacyVolumeFile(
+ (base::GetExecutableDirectory() + "/test_apm_volume_tables.xml").c_str(),
+ groups);
+ ASSERT_EQ(NO_ERROR, status);
+ EXPECT_FALSE(groups.empty());
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/Android.bp b/services/audiopolicy/engine/config/tests/resources/Android.bp
new file mode 100644
index 0000000..0aee0e9
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/Android.bp
@@ -0,0 +1,7 @@
+filegroup {
+ name: "audiopolicy_engineconfig_files",
+ srcs: [
+ "test_apm_volume_tables.xml",
+ "test_invalid_apm_volume_tables.xml",
+ ],
+}
diff --git a/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
new file mode 100644
index 0000000..16126b6
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_apm_volume_tables.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+ <volumes>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+ <point>0,-4200</point>
+ <point>33,-2800</point>
+ <point>66,-1400</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2400</point>
+ <point>33,-1600</point>
+ <point>66,-800</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ </volumes>
+ <volumes>
+ <reference name="FULL_SCALE_VOLUME_CURVE">
+ <!-- Full Scale reference Volume Curve -->
+ <point>0,0</point>
+ <point>100,0</point>
+ </reference>
+ </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
new file mode 100644
index 0000000..3ec5d10
--- /dev/null
+++ b/services/audiopolicy/engine/config/tests/resources/test_invalid_apm_volume_tables.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- This file uses a non-existent audio stream name. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+ <volumes>
+ <volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEADSET">
+ <point>0,-4200</point>
+ <point>33,-2800</point>
+ <point>66,-1400</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_NON_EXISTING" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2400</point>
+ <point>33,-1600</point>
+ <point>66,-800</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_RING" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_MUSIC" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_ALARM" deviceCategory="DEVICE_CATEGORY_SPEAKER">
+ <point>0,-2970</point>
+ <point>33,-2010</point>
+ <point>66,-1020</point>
+ <point>100,0</point>
+ </volume>
+ <volume stream="AUDIO_STREAM_NOTIFICATION" deviceCategory="DEVICE_CATEGORY_HEADSET"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_BLUETOOTH_SCO" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_ENFORCED_AUDIBLE" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ <volume stream="AUDIO_STREAM_DTMF" deviceCategory="DEVICE_CATEGORY_SPEAKER"
+ ref="FULL_SCALE_VOLUME_CURVE"/>
+ </volumes>
+ <volumes>
+ <reference name="FULL_SCALE_VOLUME_CURVE">
+ <!-- Full Scale reference Volume Curve -->
+ <point>0,0</point>
+ <point>100,0</point>
+ </reference>
+ </volumes>
+</audioPolicyConfiguration>
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e4b0dd1..4a3e31f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -29,31 +29,26 @@
#define ALOGVV(a...) do { } while(0)
#endif
-#define AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH 128
-#define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
-#define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
- "audio_policy_configuration_a2dp_offload_disabled.xml"
-#define AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME \
- "audio_policy_configuration_bluetooth_legacy_hal.xml"
-
#include <algorithm>
#include <inttypes.h>
#include <math.h>
#include <set>
#include <unordered_set>
#include <vector>
+
+#include <Serializer.h>
#include <cutils/bitops.h>
#include <cutils/properties.h>
-#include <utils/Log.h>
#include <media/AudioParameter.h>
+#include <policy.h>
#include <private/android_filesystem_config.h>
#include <system/audio.h>
#include <system/audio_config.h>
#include <system/audio_effects/effect_hapticgenerator.h>
+#include <utils/Log.h>
+
#include "AudioPolicyManager.h"
-#include <Serializer.h>
#include "TypeConverter.h"
-#include <policy.h>
namespace android {
@@ -4539,37 +4534,15 @@
}
static status_t deserializeAudioPolicyXmlConfig(AudioPolicyConfig &config) {
- char audioPolicyXmlConfigFile[AUDIO_POLICY_XML_CONFIG_FILE_PATH_MAX_LENGTH];
- std::vector<const char*> fileNames;
- status_t ret;
-
- if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false)) {
- if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false) &&
- property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // Both BluetoothAudio@2.0 and BluetoothA2dp@1.0 (Offlaod) are disabled, and uses
- // the legacy hardware module for A2DP and hearing aid.
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
- } else if (property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
- // A2DP offload supported but disabled: try to use special XML file
- fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ if (std::string audioPolicyXmlConfigFile = audio_get_audio_policy_config_file();
+ !audioPolicyXmlConfigFile.empty()) {
+ status_t ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile.c_str(), &config);
+ if (ret == NO_ERROR) {
+ config.setSource(audioPolicyXmlConfigFile);
}
- } else if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.disabled", false)) {
- fileNames.push_back(AUDIO_POLICY_BLUETOOTH_LEGACY_HAL_XML_CONFIG_FILE_NAME);
+ return ret;
}
- fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
-
- for (const char* fileName : fileNames) {
- for (const auto& path : audio_get_configuration_paths()) {
- snprintf(audioPolicyXmlConfigFile, sizeof(audioPolicyXmlConfigFile),
- "%s/%s", path.c_str(), fileName);
- ret = deserializeAudioPolicyFile(audioPolicyXmlConfigFile, &config);
- if (ret == NO_ERROR) {
- config.setSource(audioPolicyXmlConfigFile);
- return ret;
- }
- }
- }
- return ret;
+ return BAD_VALUE;
}
AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterface,
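deserializeAudioPolicyXmlConfig() now delegates the file lookup to audio_get_audio_policy_config_file(). Presumably that helper encapsulates the walk over the configuration directories and the property-driven candidate file names that were previously inlined here; the following is only a rough sketch under that assumption, not the actual implementation:

    #include <string>
    #include <unistd.h>
    #include <vector>

    // Returns the first readable candidate, or an empty string if none exists.
    static std::string findFirstReadableConfig(const std::vector<std::string>& dirs,
                                               const std::vector<std::string>& names) {
        for (const auto& name : names) {
            for (const auto& dir : dirs) {
                const std::string candidate = dir + "/" + name;
                if (access(candidate.c_str(), R_OK) == 0) {
                    return candidate;
                }
            }
        }
        return {};
    }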
@@ -5638,8 +5611,8 @@
}
DeviceVector activeDevices;
DeviceVector devices;
- for (audio_stream_type_t curStream = AUDIO_STREAM_MIN; curStream < AUDIO_STREAM_PUBLIC_CNT;
- curStream = (audio_stream_type_t) (curStream + 1)) {
+ for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_PUBLIC_CNT; ++i) {
+ const audio_stream_type_t curStream{static_cast<audio_stream_type_t>(i)};
if (!streamsMatchForvolume(stream, curStream)) {
continue;
}
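The loop rewrite above iterates over a plain int and casts once per iteration, instead of incrementing the enum value through a cast in the loop header. A standalone sketch of the idiom with a hypothetical enum:

    #include <cstdio>

    enum stream_type_t { STREAM_MIN = 0, STREAM_MUSIC, STREAM_ALARM, STREAM_PUBLIC_CNT };

    static void visitStreams() {
        for (int i = STREAM_MIN; i < STREAM_PUBLIC_CNT; ++i) {
            const stream_type_t stream = static_cast<stream_type_t>(i);
            std::printf("visiting stream %d\n", static_cast<int>(stream));
        }
    }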
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index ca2164b..7972dbf 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -58,6 +58,34 @@
ASSERT_EQ(NO_INIT, manager.initCheck());
}
+// Verifies that a failure while loading a config doesn't leave
+// the APM config in a "dirty" state. Since the AudioPolicyConfig
+// object is a proxy for the data hosted by APM, it isn't possible
+// to "deep copy" it, and thus we have to test its elements
+// individually.
+TEST(AudioPolicyManagerTestInit, ConfigLoadingIsTransactional) {
+ AudioPolicyTestClient client;
+ AudioPolicyTestManager manager(&client);
+ ASSERT_TRUE(manager.getConfig().getHwModules().isEmpty());
+ ASSERT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+ ASSERT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+ status_t status = deserializeAudioPolicyFile(
+ (base::GetExecutableDirectory() +
+ "/test_invalid_audio_policy_configuration.xml").c_str(),
+ &manager.getConfig());
+ ASSERT_NE(NO_ERROR, status);
+ EXPECT_TRUE(manager.getConfig().getHwModules().isEmpty());
+ EXPECT_TRUE(manager.getConfig().getInputDevices().isEmpty());
+ EXPECT_TRUE(manager.getConfig().getOutputDevices().isEmpty());
+ status = deserializeAudioPolicyFile(
+ (base::GetExecutableDirectory() + "/test_audio_policy_configuration.xml").c_str(),
+ &manager.getConfig());
+ ASSERT_EQ(NO_ERROR, status);
+ EXPECT_FALSE(manager.getConfig().getHwModules().isEmpty());
+ EXPECT_FALSE(manager.getConfig().getInputDevices().isEmpty());
+ EXPECT_FALSE(manager.getConfig().getOutputDevices().isEmpty());
+}
+
class PatchCountCheck {
public:
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index d9476d9..4f50dad 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -3,6 +3,7 @@
srcs: [
"test_audio_policy_configuration.xml",
"test_audio_policy_primary_only_configuration.xml",
+ "test_invalid_audio_policy_configuration.xml",
"test_tv_apm_configuration.xml",
],
}
diff --git a/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
new file mode 100644
index 0000000..25641d5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_invalid_audio_policy_configuration.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- This file contains an unnamed device port in the "r_submix" module section. -->
+<audioPolicyConfiguration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <globalConfiguration speaker_drc_enabled="true"/>
+
+ <modules>
+ <!-- Primary module -->
+ <module name="primary" halVersion="2.0">
+ <attachedDevices>
+ <item>Speaker</item>
+ <item>Built-In Mic</item>
+ </attachedDevices>
+ <defaultOutputDevice>Speaker</defaultOutputDevice>
+ <mixPorts>
+ <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="primary input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="mixport_bt_hfp_input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="8000,11025,16000,44100,48000"
+ channelMasks="AUDIO_CHANNEL_IN_STEREO,AUDIO_CHANNEL_IN_MONO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+ </devicePort>
+ <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+ </devicePort>
+ <devicePort tagName="Hdmi" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+ </devicePort>
+ <devicePort tagName="Hdmi-In Mic" type="AUDIO_DEVICE_IN_HDMI" role="source">
+ </devicePort>
+ <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO"
+ role="sink" address="hfp_client_out">
+ </devicePort>
+ <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET"
+ role="source" address="hfp_client_in">
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Speaker"
+ sources="primary output"/>
+ <route type="mix" sink="primary input"
+ sources="Built-In Mic,Hdmi-In Mic"/>
+ <route type="mix" sink="Hdmi"
+ sources="primary output"/>
+ <route type="mix" sink="BT SCO"
+ sources="mixport_bt_hfp_output"/>
+ <route type="mix" sink="mixport_bt_hfp_input"
+ sources="BT SCO Headset Mic"/>
+ </routes>
+ </module>
+
+ <!-- Remote Submix module -->
+ <module name="r_submix" halVersion="2.0">
+ <attachedDevices>
+ <item>Remote Submix In</item>
+ </attachedDevices>
+ <mixPorts>
+ <mixPort name="r_submix output" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </mixPort>
+ <mixPort name="r_submix input" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </mixPort>
+ </mixPorts>
+ <devicePorts>
+ <!-- This port is missing the "tagName" attribute. -->
+ <devicePort type="AUDIO_DEVICE_OUT_REMOTE_SUBMIX" role="sink">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+ </devicePort>
+ <devicePort tagName="Remote Submix In" type="AUDIO_DEVICE_IN_REMOTE_SUBMIX" role="source">
+ <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+ samplingRates="48000" channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+ </devicePort>
+ </devicePorts>
+ <routes>
+ <route type="mix" sink="Remote Submix Out"
+ sources="r_submix output"/>
+ <route type="mix" sink="r_submix input"
+ sources="Remote Submix In"/>
+ </routes>
+ </module>
+ </modules>
+</audioPolicyConfiguration>
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index f033d5c..91590e1 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -131,6 +131,7 @@
"statsd_codec.cpp",
"statsd_drm.cpp",
"statsd_extractor.cpp",
+ "statsd_mediaparser.cpp",
"statsd_nuplayer.cpp",
"statsd_recorder.cpp",
"StringUtils.cpp"
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 33dfa8fa..34be0b9 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -28,7 +28,7 @@
#include <cutils/properties.h>
#include <statslog.h>
#include <sys/timerfd.h>
-#include <system/audio-base.h>
+#include <system/audio.h>
// property to disable audio power use metrics feature, default is enabled
#define PROP_AUDIO_METRICS_DISABLED "persist.media.audio_metrics.power_usage_disabled"
diff --git a/services/mediametrics/iface_statsd.cpp b/services/mediametrics/iface_statsd.cpp
index 6e51f72..16204de 100644
--- a/services/mediametrics/iface_statsd.cpp
+++ b/services/mediametrics/iface_statsd.cpp
@@ -64,6 +64,7 @@
{ "drmmanager", statsd_drmmanager },
{ "extractor", statsd_extractor },
{ "mediadrm", statsd_mediadrm },
+ { "mediaparser", statsd_mediaparser },
{ "nuplayer", statsd_nuplayer },
{ "nuplayer2", statsd_nuplayer },
{ "recorder", statsd_recorder },
diff --git a/services/mediametrics/iface_statsd.h b/services/mediametrics/iface_statsd.h
index 19505a4..9b49556 100644
--- a/services/mediametrics/iface_statsd.h
+++ b/services/mediametrics/iface_statsd.h
@@ -25,6 +25,7 @@
extern bool statsd_audiotrack(const mediametrics::Item *);
extern bool statsd_codec(const mediametrics::Item *);
extern bool statsd_extractor(const mediametrics::Item *);
+extern bool statsd_mediaparser(const mediametrics::Item *);
extern bool statsd_nuplayer(const mediametrics::Item *);
extern bool statsd_recorder(const mediametrics::Item *);
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
new file mode 100644
index 0000000..3258ebf
--- /dev/null
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "statsd_mediaparser"
+#include <utils/Log.h>
+
+#include <dirent.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <pwd.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <statslog.h>
+
+#include "MediaMetricsService.h"
+#include "frameworks/base/core/proto/android/stats/mediametrics/mediametrics.pb.h"
+#include "iface_statsd.h"
+
+namespace android {
+
+bool statsd_mediaparser(const mediametrics::Item *item)
+{
+ if (item == nullptr) {
+ return false;
+ }
+
+ // statsd wrapper data.
+ const nsecs_t timestamp = MediaMetricsService::roundTime(item->getTimestamp());
+ std::string pkgName = item->getPkgName();
+ int64_t pkgVersionCode = item->getPkgVersionCode();
+
+ std::string parserName;
+ item->getString("android.media.mediaparser.parserName", &parserName);
+
+ int32_t createdByName = -1;
+ item->getInt32("android.media.mediaparser.createdByName", &createdByName);
+
+ std::string parserPool;
+ item->getString("android.media.mediaparser.parserPool", &parserPool);
+
+ std::string lastException;
+ item->getString("android.media.mediaparser.lastException", &lastException);
+
+ int64_t resourceByteCount = -1;
+ item->getInt64("android.media.mediaparser.resourceByteCount", &resourceByteCount);
+
+ int64_t durationMillis = -1;
+ item->getInt64("android.media.mediaparser.durationMillis", &durationMillis);
+
+ std::string trackMimeTypes;
+ item->getString("android.media.mediaparser.trackMimeTypes", &trackMimeTypes);
+
+ std::string trackCodecs;
+ item->getString("android.media.mediaparser.trackCodecs", &trackCodecs);
+
+ std::string alteredParameters;
+ item->getString("android.media.mediaparser.alteredParameters", &alteredParameters);
+
+ int32_t videoWidth = -1;
+ item->getInt32("android.media.mediaparser.videoWidth", &videoWidth);
+
+ int32_t videoHeight = -1;
+ item->getInt32("android.media.mediaparser.videoHeight", &videoHeight);
+
+ if (enabled_statsd) {
+ (void) android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
+ timestamp,
+ pkgName.c_str(),
+ pkgVersionCode,
+ parserName.c_str(),
+ createdByName,
+ parserPool.c_str(),
+ lastException.c_str(),
+ resourceByteCount,
+ durationMillis,
+ trackMimeTypes.c_str(),
+ trackCodecs.c_str(),
+ alteredParameters.c_str(),
+ videoWidth,
+ videoHeight);
+ } else {
+ ALOGV("NOT sending MediaParser media metrics.");
+ }
+
+ return true;
+}
+
+} // namespace android
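The new statsd_mediaparser handler plugs into the name-to-handler table in iface_statsd.cpp: the item's key selects the handler, which pulls the individual fields out of the item and forwards them to statsd. A simplified sketch of that table-driven dispatch, with a hypothetical Item type standing in for mediametrics::Item and the real statsd call:

    #include <cstdio>
    #include <functional>
    #include <map>
    #include <string>

    struct Item { std::string key; std::string parserName; };

    static bool handleMediaParser(const Item& item) {
        // A real handler would extract every field and call the statsd writer.
        std::printf("mediaparser metrics: parserName=%s\n", item.parserName.c_str());
        return true;
    }

    static const std::map<std::string, std::function<bool(const Item&)>> kHandlers = {
        { "mediaparser", handleMediaParser },
        // one entry per metrics key, e.g. "extractor", "codec", ...
    };

    static bool dispatch(const Item& item) {
        const auto it = kHandlers.find(item.key);
        return it != kHandlers.end() && it->second(item);
    }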
diff --git a/services/oboeservice/AAudioEndpointManager.cpp b/services/oboeservice/AAudioEndpointManager.cpp
index 9f34153..483a264 100644
--- a/services/oboeservice/AAudioEndpointManager.cpp
+++ b/services/oboeservice/AAudioEndpointManager.cpp
@@ -306,6 +306,7 @@
mSharedStreams.end());
serviceEndpoint->close();
+
mSharedCloseCount++;
ALOGV("%s(%p) closed for device %d",
__func__, serviceEndpoint.get(), serviceEndpoint->getDeviceId());
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index f5de59f..caf6139 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -88,23 +88,30 @@
}
void AAudioServiceEndpointShared::close() {
- getStreamInternal()->releaseCloseFinal();
+ stopSharingThread();
+ getStreamInternal()->safeReleaseClose();
}
// Glue between C and C++ callbacks.
static void *aaudio_endpoint_thread_proc(void *arg) {
assert(arg != nullptr);
+ ALOGD("%s() called", __func__);
- // The caller passed in a smart pointer to prevent the endpoint from getting deleted
- // while the thread was launching.
- sp<AAudioServiceEndpointShared> *endpointForThread =
- static_cast<sp<AAudioServiceEndpointShared> *>(arg);
- sp<AAudioServiceEndpointShared> endpoint = *endpointForThread;
- delete endpointForThread; // Just use scoped smart pointer. Don't need this anymore.
+ // Prevent the stream from being deleted while it is being used.
+ // This is just for extra safety. It is probably not needed because
+ // this callback thread should be joined before the stream is closed.
+ AAudioServiceEndpointShared *endpointPtr =
+ static_cast<AAudioServiceEndpointShared *>(arg);
+ android::sp<AAudioServiceEndpointShared> endpoint(endpointPtr);
+ // Balance the incStrong() in startSharingThread_l().
+ endpoint->decStrong(nullptr);
+
void *result = endpoint->callbackLoop();
// Close now so that the HW resource is freed and we can open a new device.
if (!endpoint->isConnected()) {
- endpoint->close();
+ ALOGD("%s() call safeReleaseCloseFromCallback()", __func__);
+ // Release and close under a lock with no check for callback collisions.
+ endpoint->getStreamInternal()->safeReleaseCloseFromCallback();
}
return result;
@@ -116,14 +123,14 @@
* AAUDIO_NANOS_PER_SECOND
/ getSampleRate();
mCallbackEnabled.store(true);
- // Pass a smart pointer so the thread can hold a reference.
- sp<AAudioServiceEndpointShared> *endpointForThread = new sp<AAudioServiceEndpointShared>(this);
- aaudio_result_t result = getStreamInternal()->createThread(periodNanos,
- aaudio_endpoint_thread_proc,
- endpointForThread);
+ // Prevent this object from getting deleted before the thread has a chance to create
+ // its strong pointer. Assume the thread will call decStrong().
+ this->incStrong(nullptr);
+ aaudio_result_t result = getStreamInternal()->createThread_l(periodNanos,
+ aaudio_endpoint_thread_proc,
+ this);
if (result != AAUDIO_OK) {
- // The thread can't delete it so we have to do it here.
- delete endpointForThread;
+ this->decStrong(nullptr); // Because the thread won't do it.
}
return result;
}
@@ -141,13 +148,13 @@
{
std::lock_guard<std::mutex> lock(mLockStreams);
if (++mRunningStreamCount == 1) { // atomic
- result = getStreamInternal()->requestStart();
+ result = getStreamInternal()->requestStart_l();
if (result != AAUDIO_OK) {
--mRunningStreamCount;
} else {
result = startSharingThread_l();
if (result != AAUDIO_OK) {
- getStreamInternal()->requestStop();
+ getStreamInternal()->requestStop_l();
--mRunningStreamCount;
}
}
@@ -161,7 +168,7 @@
if (result != AAUDIO_OK) {
if (--mRunningStreamCount == 0) { // atomic
stopSharingThread();
- getStreamInternal()->requestStop();
+ getStreamInternal()->requestStop_l();
}
}
}
@@ -176,7 +183,7 @@
if (--mRunningStreamCount == 0) { // atomic
stopSharingThread(); // the sharing thread locks mLockStreams
- getStreamInternal()->requestStop();
+ getStreamInternal()->requestStop_l();
}
return AAUDIO_OK;
}
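The sharing-thread changes above replace the heap-allocated sp<> handoff with a manual incStrong()/decStrong() pair: the launcher takes an extra strong reference before creating the thread, and the thread drops it once it holds its own sp<>, so the endpoint cannot be destroyed in the window in between. A minimal sketch of the same pattern with a hypothetical Worker class, assuming the libutils RefBase API:

    #include <pthread.h>
    #include <utils/RefBase.h>
    #include <utils/StrongPointer.h>

    class Worker : public android::RefBase {
    public:
        bool start() {
            incStrong(nullptr);  // keep 'this' alive until the thread takes a reference
            if (pthread_create(&mThread, nullptr, &Worker::threadProc, this) != 0) {
                decStrong(nullptr);  // thread never launched, release it ourselves
                return false;
            }
            return true;
        }
        void join() { pthread_join(mThread, nullptr); }
    private:
        static void* threadProc(void* arg) {
            android::sp<Worker> self(static_cast<Worker*>(arg));
            self->decStrong(nullptr);  // balance the incStrong() in start()
            // ... do work; 'self' keeps the object alive until this returns ...
            return nullptr;
        }
        pthread_t mThread{};
    };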
diff --git a/services/oboeservice/AAudioServiceEndpointShared.h b/services/oboeservice/AAudioServiceEndpointShared.h
index 020b926..91a86c1 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.h
+++ b/services/oboeservice/AAudioServiceEndpointShared.h
@@ -37,6 +37,8 @@
public:
explicit AAudioServiceEndpointShared(AudioStreamInternal *streamInternal);
+ virtual ~AAudioServiceEndpointShared() = default;
+
std::string dump() const override;
aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
@@ -55,12 +57,12 @@
virtual void *callbackLoop() = 0;
-protected:
-
AudioStreamInternal *getStreamInternal() const {
return mStreamInternal.get();
};
+protected:
+
aaudio_result_t startSharingThread_l();
aaudio_result_t stopSharingThread();
diff --git a/services/oboeservice/AAudioThread.cpp b/services/oboeservice/AAudioThread.cpp
index ed7895b..68496ac 100644
--- a/services/oboeservice/AAudioThread.cpp
+++ b/services/oboeservice/AAudioThread.cpp
@@ -37,10 +37,13 @@
setup("AAudio");
}
-void AAudioThread::setup(const char *prefix) {
- // mThread is a pthread_t of unknown size so we need memset().
- memset(&mThread, 0, sizeof(mThread));
+AAudioThread::~AAudioThread() {
+ ALOGE_IF(pthread_equal(pthread_self(), mThread),
+ "%s() destructor running in thread", __func__);
+ ALOGE_IF(mHasThread, "%s() thread never joined", __func__);
+}
+void AAudioThread::setup(const char *prefix) {
// Name the thread with an increasing index, "prefix_#", for debugging.
uint32_t index = mNextThreadIndex++;
// Wrap the index so that we do not hit the 16 char limit
@@ -57,7 +60,7 @@
}
}
-// This is the entry point for the new thread created by createThread().
+// This is the entry point for the new thread created by createThread_l().
// It converts the 'C' function call to a C++ method call.
static void * AAudioThread_internalThreadProc(void *arg) {
AAudioThread *aaudioThread = (AAudioThread *) arg;
@@ -90,13 +93,18 @@
ALOGE("stop() but no thread running");
return AAUDIO_ERROR_INVALID_STATE;
}
+ // Check to see if the thread is trying to stop itself.
+ if (pthread_equal(pthread_self(), mThread)) {
+ ALOGE("%s() attempt to pthread_join() from launched thread!", __func__);
+ return AAUDIO_ERROR_INTERNAL;
+ }
+
int err = pthread_join(mThread, nullptr);
- mHasThread = false;
if (err != 0) {
ALOGE("stop() - pthread_join() returned %d %s", err, strerror(err));
return AAudioConvert_androidToAAudioResult(-err);
} else {
+ mHasThread = false;
return AAUDIO_OK;
}
}
-
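stop() now refuses to pthread_join() from the launched thread itself, since joining the calling thread would deadlock (or fail with EDEADLK). A standalone sketch of that guard:

    #include <cstdio>
    #include <pthread.h>

    // Joins 'thread' unless the caller *is* that thread.
    static int safeJoin(pthread_t thread) {
        if (pthread_equal(pthread_self(), thread)) {
            std::fprintf(stderr, "refusing to join the calling thread\n");
            return -1;
        }
        return pthread_join(thread, nullptr);
    }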
diff --git a/services/oboeservice/AAudioThread.h b/services/oboeservice/AAudioThread.h
index dcce68a..08a8a98 100644
--- a/services/oboeservice/AAudioThread.h
+++ b/services/oboeservice/AAudioThread.h
@@ -46,7 +46,7 @@
explicit AAudioThread(const char *prefix);
- virtual ~AAudioThread() = default;
+ virtual ~AAudioThread();
/**
* Start the thread running.
@@ -73,7 +73,7 @@
Runnable *mRunnable = nullptr;
bool mHasThread = false;
- pthread_t mThread; // initialized in constructor
+ pthread_t mThread = {};
static std::atomic<uint32_t> mNextThreadIndex;
char mName[16]; // max length for a pthread_name