Merge "C2SoftAacDec: Fix reset while codec's stop()"
diff --git a/apex/Android.bp b/apex/Android.bp
index dabf4c2..5a88d24 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -24,7 +24,7 @@
apex_defaults {
name: "com.android.media-defaults",
updatable: true,
- java_libs: ["updatable-media"],
+ bootclasspath_fragments: ["com.android.media-bootclasspath-fragment"],
multilib: {
first: {
// Extractor process runs only with the primary ABI.
@@ -74,6 +74,7 @@
manifest: "manifest.json",
defaults: ["com.android.media-defaults"],
prebuilts: [
+ "current_sdkinfo",
"media-linker-config",
],
}
@@ -84,6 +85,13 @@
installable: false,
}
+// Encapsulate the contributions made by the com.android.media APEX to the bootclasspath.
+bootclasspath_fragment {
+ name: "com.android.media-bootclasspath-fragment",
+ contents: ["updatable-media"],
+ apex_available: ["com.android.media"],
+}
+
filegroup {
name: "com.android.media-androidManifest",
srcs: ["AndroidManifest-media.xml"],
diff --git a/apex/OWNERS b/apex/OWNERS
index 5587f5f..a60d04b 100644
--- a/apex/OWNERS
+++ b/apex/OWNERS
@@ -4,3 +4,6 @@
lajos@google.com
marcone@google.com
wjia@google.com
+
+include platform/packages/modules/common:/MODULES_OWNERS
+
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index 755051c..be47898 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -89,7 +89,7 @@
status_t PhysicalCaptureResultInfo::readFromParcel(const android::Parcel* parcel) {
status_t res;
- mPhysicalCameraId.remove(mPhysicalCameraId.size());
+ mPhysicalCameraId.setTo(u"");
mPhysicalCameraMetadata.clear();
if ((res = parcel->readString16(&mPhysicalCameraId)) != OK) {
diff --git a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
index 6ac3510..089eb1c 100644
--- a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
@@ -207,6 +207,7 @@
}
infoMap.clear();
+ android::Mutex::Autolock lock(mPlayPolicyLock);
for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
infoMap.add(mPlayPolicy.keyAt(i), mPlayPolicy.valueAt(i));
}
diff --git a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
index aa9b59d..95f15ca 100644
--- a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
@@ -262,7 +262,7 @@
void initProperties();
void setPlayPolicy();
- android::Mutex mPlayPolicyLock;
+ mutable android::Mutex mPlayPolicyLock;
android::KeyedVector<String8, String8> mPlayPolicy;
android::KeyedVector<String8, String8> mStringProperties;
android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
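The two ClearKey changes above take mPlayPolicyLock around every access to mPlayPolicy and make the mutex mutable so that const methods can still lock it. A minimal standalone sketch of that pattern, using std::mutex and std::lock_guard in place of android::Mutex so it compiles outside the Android tree:

#include <map>
#include <mutex>
#include <string>

class PolicyStore {
  public:
    void set(const std::string& key, const std::string& value) {
        std::lock_guard<std::mutex> lock(mLock);
        mPolicy[key] = value;
    }
    // A const reader can still lock because mLock is declared mutable.
    std::map<std::string, std::string> snapshot() const {
        std::lock_guard<std::mutex> lock(mLock);
        return mPolicy;  // copy out while holding the lock
    }
  private:
    mutable std::mutex mLock;                    // mirrors mPlayPolicyLock
    std::map<std::string, std::string> mPolicy;  // mirrors mPlayPolicy
};
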
diff --git a/drm/mediadrm/plugins/clearkey/hidl/Android.bp b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
index e6e1f80..c49d5fe 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/hidl/Android.bp
@@ -50,7 +50,7 @@
relative_install_path: "hw",
- cflags: ["-Wall", "-Werror"],
+ cflags: ["-Wall", "-Werror", "-Wthread-safety"],
shared_libs: [
"android.hardware.drm@1.0",
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index d278633..302dd39 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -37,6 +37,8 @@
sp<IMemory> hidlMemory = mapMemory(base);
ALOGE_IF(hidlMemory == nullptr, "mapMemory returns nullptr");
+ std::lock_guard<std::mutex> shared_buffer_lock(mSharedBufferLock);
+
// allow mapMemory to return nullptr
mSharedBufferMap[bufferId] = hidlMemory;
return Void();
@@ -94,6 +96,7 @@
return Void();
}
+ std::unique_lock<std::mutex> shared_buffer_lock(mSharedBufferLock);
if (mSharedBufferMap.find(source.bufferId) == mSharedBufferMap.end()) {
_hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0,
"source decrypt buffer base not set");
@@ -142,12 +145,17 @@
base = static_cast<uint8_t *>(static_cast<void *>(destBase->getPointer()));
- if (destBuffer.offset + destBuffer.size > destBase->getSize()) {
+ totalSize = 0;
+ if (__builtin_add_overflow(destBuffer.offset, destBuffer.size, &totalSize) ||
+ totalSize > destBase->getSize()) {
+ android_errorWriteLog(0x534e4554, "176444622");
_hidl_cb(Status_V1_2::ERROR_DRM_FRAME_TOO_LARGE, 0, "invalid buffer size");
return Void();
}
- destPtr = static_cast<void *>(base + destination.nonsecureMemory.offset);
+ destPtr = static_cast<void*>(base + destination.nonsecureMemory.offset);
+ // Release mSharedBufferLock; the decrypt work below no longer touches mSharedBufferMap.
+ shared_buffer_lock.unlock();
// Calculate the output buffer size and determine if any subsamples are
// encrypted.
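The decrypt change above validates destBuffer.offset + destBuffer.size with __builtin_add_overflow before comparing against the destination base capacity, so a wrapped sum can no longer slip past the bounds check. A small self-contained sketch of the same check (names are illustrative):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Returns true only if [offset, offset + size) fits inside capacity,
// rejecting the case where offset + size wraps around size_t.
bool rangeFits(size_t offset, size_t size, size_t capacity) {
    size_t end = 0;
    if (__builtin_add_overflow(offset, size, &end)) {
        return false;  // the sum overflowed
    }
    return end <= capacity;
}

int main() {
    std::printf("%d\n", rangeFits(16, 32, 64));       // 1: fits
    std::printf("%d\n", rangeFits(48, 32, 64));       // 0: runs past the end
    std::printf("%d\n", rangeFits(SIZE_MAX, 2, 64));  // 0: sum wraps around
    return 0;
}
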
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index a77759e..6f69110 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -220,6 +220,7 @@
if (requestString.find(kOfflineLicense) != std::string::npos) {
std::string emptyResponse;
std::string keySetIdString(keySetId.begin(), keySetId.end());
+ Mutex::Autolock lock(mFileHandleLock);
if (!mFileHandle.StoreLicense(keySetIdString,
DeviceFiles::kLicenseStateReleasing,
emptyResponse)) {
@@ -335,6 +336,7 @@
}
*keySetId = kKeySetIdPrefix + ByteArrayToHexString(
reinterpret_cast<const uint8_t*>(randomData.data()), randomData.size());
+ Mutex::Autolock lock(mFileHandleLock);
if (mFileHandle.LicenseExists(*keySetId)) {
// collision, regenerate
ALOGV("Retry generating KeySetId");
@@ -392,6 +394,7 @@
if (status == Status::OK) {
if (isOfflineLicense) {
if (isRelease) {
+ Mutex::Autolock lock(mFileHandleLock);
mFileHandle.DeleteLicense(keySetId);
mSessionLibrary->destroySession(session);
} else {
@@ -400,6 +403,7 @@
return Void();
}
+ Mutex::Autolock lock(mFileHandleLock);
bool ok = mFileHandle.StoreLicense(
keySetId,
DeviceFiles::kLicenseStateActive,
@@ -454,6 +458,7 @@
DeviceFiles::LicenseState licenseState;
std::string offlineLicense;
Status status = Status::OK;
+ Mutex::Autolock lock(mFileHandleLock);
if (!mFileHandle.RetrieveLicense(std::string(keySetId.begin(), keySetId.end()),
&licenseState, &offlineLicense)) {
ALOGE("Failed to restore offline license");
@@ -576,7 +581,6 @@
Return<void> DrmPlugin::queryKeyStatus(
const hidl_vec<uint8_t>& sessionId,
queryKeyStatus_cb _hidl_cb) {
-
if (sessionId.size() == 0) {
// Returns empty key status KeyValue pair
_hidl_cb(Status::BAD_VALUE, hidl_vec<KeyValue>());
@@ -586,12 +590,14 @@
std::vector<KeyValue> infoMapVec;
infoMapVec.clear();
+ mPlayPolicyLock.lock();
KeyValue keyValuePair;
for (size_t i = 0; i < mPlayPolicy.size(); ++i) {
keyValuePair.key = mPlayPolicy[i].key;
keyValuePair.value = mPlayPolicy[i].value;
infoMapVec.push_back(keyValuePair);
}
+ mPlayPolicyLock.unlock();
_hidl_cb(Status::OK, toHidlVec(infoMapVec));
return Void();
}
@@ -704,6 +710,8 @@
}
Return<void> DrmPlugin::getOfflineLicenseKeySetIds(getOfflineLicenseKeySetIds_cb _hidl_cb) {
+ Mutex::Autolock lock(mFileHandleLock);
+
std::vector<std::string> licenseNames = mFileHandle.ListLicenses();
std::vector<KeySetId> keySetIds;
if (mMockError != Status_V1_2::OK) {
@@ -724,6 +732,7 @@
return toStatus_1_0(mMockError);
}
std::string licenseName(keySetId.begin(), keySetId.end());
+ Mutex::Autolock lock(mFileHandleLock);
if (mFileHandle.DeleteLicense(licenseName)) {
return Status::OK;
}
@@ -732,6 +741,8 @@
Return<void> DrmPlugin::getOfflineLicenseState(const KeySetId& keySetId,
getOfflineLicenseState_cb _hidl_cb) {
+ Mutex::Autolock lock(mFileHandleLock);
+
std::string licenseName(keySetId.begin(), keySetId.end());
DeviceFiles::LicenseState state;
std::string license;
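queryKeyStatus above brackets the copy loop with explicit mPlayPolicyLock.lock()/unlock() calls. Purely as an illustration (not what the patch does), the same critical section can be written with a scoped guard so the unlock also happens on early returns; std::mutex stands in for android::Mutex here:

#include <mutex>
#include <string>
#include <utility>
#include <vector>

using KeyValue = std::pair<std::string, std::string>;

std::vector<KeyValue> copyPolicy(std::mutex& policyLock,
                                 const std::vector<KeyValue>& playPolicy) {
    std::vector<KeyValue> infoMapVec;
    {
        // Critical section: the guard releases policyLock when the block
        // ends, including on any early return.
        std::lock_guard<std::mutex> guard(policyLock);
        infoMapVec = playPolicy;
    }
    return infoMapVec;  // consumed outside the lock
}
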
diff --git a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
index 051a968..32cf2dc 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/MemoryFileSystem.cpp
@@ -24,11 +24,13 @@
}
bool MemoryFileSystem::FileExists(const std::string& fileName) const {
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
auto result = mMemoryFileSystem.find(fileName);
return result != mMemoryFileSystem.end();
}
ssize_t MemoryFileSystem::GetFileSize(const std::string& fileName) const {
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
auto result = mMemoryFileSystem.find(fileName);
if (result != mMemoryFileSystem.end()) {
return static_cast<ssize_t>(result->second.getFileSize());
@@ -40,6 +42,7 @@
std::vector<std::string> MemoryFileSystem::ListFiles() const {
std::vector<std::string> list;
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
for (const auto& filename : mMemoryFileSystem) {
list.push_back(filename.first);
}
@@ -48,6 +51,7 @@
size_t MemoryFileSystem::Read(const std::string& path, std::string* buffer) {
std::string key = GetFileName(path);
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
auto result = mMemoryFileSystem.find(key);
if (result != mMemoryFileSystem.end()) {
std::string serializedHashFile = result->second.getContent();
@@ -61,6 +65,7 @@
size_t MemoryFileSystem::Write(const std::string& path, const MemoryFile& memoryFile) {
std::string key = GetFileName(path);
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
auto result = mMemoryFileSystem.find(key);
if (result != mMemoryFileSystem.end()) {
mMemoryFileSystem.erase(key);
@@ -70,6 +75,7 @@
}
bool MemoryFileSystem::RemoveFile(const std::string& fileName) {
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
auto result = mMemoryFileSystem.find(fileName);
if (result != mMemoryFileSystem.end()) {
mMemoryFileSystem.erase(result);
@@ -81,6 +87,7 @@
}
bool MemoryFileSystem::RemoveAllFiles() {
+ std::lock_guard<std::mutex> lock(mMemoryFileSystemLock);
mMemoryFileSystem.clear();
return mMemoryFileSystem.empty();
}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
index 8680f0c..23a64fa 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/CryptoPlugin.h
@@ -20,6 +20,8 @@
#include <android/hardware/drm/1.2/ICryptoPlugin.h>
#include <android/hidl/memory/1.0/IMemory.h>
+#include <mutex>
+
#include "ClearKeyTypes.h"
#include "Session.h"
#include "Utils.h"
@@ -93,7 +95,7 @@
const SharedBuffer& source,
uint64_t offset,
const DestinationBuffer& destination,
- decrypt_1_2_cb _hidl_cb);
+ decrypt_1_2_cb _hidl_cb) NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock with a manual unlock
Return<void> setSharedBufferBase(const hidl_memory& base,
uint32_t bufferId);
@@ -105,7 +107,8 @@
private:
CLEARKEY_DISALLOW_COPY_AND_ASSIGN(CryptoPlugin);
- std::map<uint32_t, sp<IMemory> > mSharedBufferMap;
+ std::mutex mSharedBufferLock;
+ std::map<uint32_t, sp<IMemory>> mSharedBufferMap GUARDED_BY(mSharedBufferLock);
sp<Session> mSession;
Status mInitStatus;
};
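This header pairs the new -Wthread-safety flag in Android.bp with Clang annotations: GUARDED_BY ties mSharedBufferMap to mSharedBufferLock, and decrypt_1_2 opts out with NO_THREAD_SAFETY_ANALYSIS because it unlocks a unique_lock by hand. A minimal sketch of the annotation pattern; on Android the macros come from android-base/thread_annotations.h and are re-declared here only so the snippet stands alone:

#include <cstdint>
#include <map>
#include <mutex>
#include <string>

// Stand-ins for the macros in android-base/thread_annotations.h. Whether the
// analysis can track std::mutex depends on the standard library carrying
// capability annotations (Android's libc++ does), so treat this as a sketch.
#if defined(__clang__)
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis))
#else
#define GUARDED_BY(x)
#define NO_THREAD_SAFETY_ANALYSIS
#endif

class BufferMap {
  public:
    void set(uint32_t id, std::string mem) {
        std::lock_guard<std::mutex> lock(mLock);
        mBuffers[id] = std::move(mem);  // fine: mLock is held
    }
    // Methods that manage the lock manually can opt out of the analysis,
    // as decrypt_1_2 does above.
    void eraseUnchecked(uint32_t id) NO_THREAD_SAFETY_ANALYSIS {
        mBuffers.erase(id);
    }
  private:
    std::mutex mLock;
    std::map<uint32_t, std::string> mBuffers GUARDED_BY(mLock);
};
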
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 076beb8..894985b 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -416,7 +416,8 @@
mMockError = Status_V1_2::OK;
}
- DeviceFiles mFileHandle;
+ DeviceFiles mFileHandle GUARDED_BY(mFileHandleLock);
+ Mutex mFileHandleLock;
Mutex mSecureStopLock;
CLEARKEY_DISALLOW_COPY_AND_ASSIGN_AND_NEW(DrmPlugin);
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
index bcd9fd6..6ac0e2c 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/MemoryFileSystem.h
@@ -5,7 +5,9 @@
#ifndef CLEARKEY_MEMORY_FILE_SYSTEM_H_
#define CLEARKEY_MEMORY_FILE_SYSTEM_H_
+#include <android-base/thread_annotations.h>
#include <map>
+#include <mutex>
#include <string>
#include "ClearKeyTypes.h"
@@ -49,10 +51,12 @@
size_t Write(const std::string& pathName, const MemoryFile& memoryFile);
private:
+ mutable std::mutex mMemoryFileSystemLock;
+
// License file name is made up of a unique keySetId, therefore,
// the filename can be used as the key to locate licenses in the
// memory file system.
- std::map<std::string, MemoryFile> mMemoryFileSystem;
+ std::map<std::string, MemoryFile> mMemoryFileSystem GUARDED_BY(mMemoryFileSystemLock);
std::string GetFileName(const std::string& path);
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 62475ce..342d771 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -515,8 +515,8 @@
// TODO: error handling, proper usage, etc.
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- c2_status_t err = pool->fetchLinearBlock(
- numSamples * sizeof(int16_t), usage, &block);
+ size_t bufferSize = numSamples * sizeof(int16_t);
+ c2_status_t err = pool->fetchLinearBlock(bufferSize, usage, &block);
if (err != C2_OK) {
ALOGD("failed to fetch a linear block (%d)", err);
return std::bind(fillEmptyWork, _1, C2_NO_MEMORY);
@@ -530,7 +530,7 @@
mSignalledError = true;
return std::bind(fillEmptyWork, _1, C2_CORRUPTED);
}
- return [buffer = createLinearBuffer(block)](
+ return [buffer = createLinearBuffer(block, 0, bufferSize)](
const std::unique_ptr<C2Work> &work) {
work->result = C2_OK;
C2FrameData &output = work->worklets.front()->output;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index c08e02b..e92d38d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -143,7 +143,7 @@
if (!mIsWide) {
Speech_Decode_Frame_reset(mAmrHandle);
} else {
- pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
+ pvDecoder_AmrWb_Reset(mAmrHandle, 1 /* reset_all */);
}
mSignalledError = false;
mSignalledOutputEos = false;
@@ -361,7 +361,13 @@
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ // We filled the output buffer up to (intptr_t)output - (intptr_t)wView.data();
+ // use calOutSize as the buffer size since it holds the expected number of output bytes.
+ ALOGD_IF(calOutSize != ((intptr_t)output - (intptr_t)wView.data()),
+ "Expected %zu output bytes, but filled %lld",
+ calOutSize, (long long)((intptr_t)output - (intptr_t)wView.data()));
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(block, 0, calOutSize));
work->worklets.front()->output.ordinal = work->input.ordinal;
if (eos) {
mSignalledOutputEos = true;
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index dfad226..6c4b7d9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -592,21 +592,11 @@
}
std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
- const std::shared_ptr<C2LinearBlock> &block) {
- return createLinearBuffer(block, block->offset(), block->size());
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
}
std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
- const std::shared_ptr<C2GraphicBlock> &block) {
- return createGraphicBuffer(block, C2Rect(block->width(), block->height()));
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
const std::shared_ptr<C2GraphicBlock> &block, const C2Rect &crop) {
return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence()));
}
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 22d5714..e5e16d8 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -140,15 +140,9 @@
std::shared_ptr<C2Buffer> createLinearBuffer(
- const std::shared_ptr<C2LinearBlock> &block);
-
- std::shared_ptr<C2Buffer> createLinearBuffer(
const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
std::shared_ptr<C2Buffer> createGraphicBuffer(
- const std::shared_ptr<C2GraphicBlock> &block);
-
- std::shared_ptr<C2Buffer> createGraphicBuffer(
const std::shared_ptr<C2GraphicBlock> &block,
const C2Rect &crop);
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index f9299af..f952f22 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -199,7 +199,7 @@
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
work->worklets.front()->output.ordinal = work->input.ordinal;
if (eos) {
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 76345ae..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -26,6 +26,11 @@
#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
+namespace {
+
+constexpr uint8_t NEUTRAL_UV_VALUE = 128;
+
+} // namespace
// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -51,8 +56,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(SizeSetter)
.build());
@@ -65,12 +70,14 @@
C2Config::PROFILE_AV1_1}),
C2F(mProfileLevel, level)
.oneOf({
- C2Config::LEVEL_AV1_2,
- C2Config::LEVEL_AV1_2_1,
- C2Config::LEVEL_AV1_2_2,
- C2Config::LEVEL_AV1_3,
- C2Config::LEVEL_AV1_3_1,
- C2Config::LEVEL_AV1_3_2,
+ C2Config::LEVEL_AV1_2, C2Config::LEVEL_AV1_2_1,
+ C2Config::LEVEL_AV1_2_2, C2Config::LEVEL_AV1_2_3,
+ C2Config::LEVEL_AV1_3, C2Config::LEVEL_AV1_3_1,
+ C2Config::LEVEL_AV1_3_2, C2Config::LEVEL_AV1_3_3,
+ C2Config::LEVEL_AV1_4, C2Config::LEVEL_AV1_4_1,
+ C2Config::LEVEL_AV1_4_2, C2Config::LEVEL_AV1_4_3,
+ C2Config::LEVEL_AV1_5, C2Config::LEVEL_AV1_5_1,
+ C2Config::LEVEL_AV1_5_2, C2Config::LEVEL_AV1_5_3,
})})
.withSetter(ProfileLevelSetter, mSize)
.build());
@@ -462,7 +469,8 @@
const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height) {
+ uint32_t width, uint32_t height,
+ bool isMonochrome) {
for (size_t i = 0; i < height; ++i) {
memcpy(dstY, srcY, width);
@@ -470,6 +478,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t i = 0; i < height / 2; ++i) {
+ memset(dstV, NEUTRAL_UV_VALUE, width / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, width / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t i = 0; i < height / 2; ++i) {
memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
@@ -555,7 +574,7 @@
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height) {
+ size_t width, size_t height, bool isMonochrome) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -566,6 +585,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t y = 0; y < (height + 1) / 2; ++y) {
for (size_t x = 0; x < (width + 1) / 2; ++x) {
dstU[x] = (uint8_t)(srcU[x] >> 2);
@@ -621,8 +651,16 @@
}
}
- // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
- CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+ if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+ buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+ ALOGE("image_format %d not supported", buffer->image_format);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+ const bool isMonochrome =
+ buffer->image_format == libgav1::kImageFormatMonochrome400;
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
@@ -634,6 +672,13 @@
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ if (buffer->image_format != libgav1::kImageFormatYuv420) {
+ ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+ mSignalledError = true;
+ work->result = C2_OMITTED;
+ work->workletsProcessed = 1u;
+ return false;
+ }
format = HAL_PIXEL_FORMAT_RGBA_1010102;
}
}
@@ -680,21 +725,18 @@
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar16ToYUV420Planar(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
+ isMonochrome);
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
- copyOutputBufferToYV12Frame(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ copyOutputBufferToYV12Frame(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
}
finishWork(buffer->user_private_data, work, std::move(block));
block = nullptr;
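For monochrome (4:0:0) AV1 output the decoder above writes NEUTRAL_UV_VALUE (128) into both chroma planes, which renders as greyscale. A standalone sketch of that fill on a planar 4:2:0 destination, with made-up sizes:

#include <cstdint>
#include <cstring>
#include <vector>

constexpr uint8_t kNeutralUvValue = 128;  // same neutral chroma value

// Writes the neutral value into every chroma sample; with U and V forced to
// 128 the image carries only luma, i.e. it displays as greyscale.
void fillNeutralChroma(uint8_t* dstU, uint8_t* dstV, size_t uvStride,
                       size_t width, size_t height) {
    for (size_t i = 0; i < (height + 1) / 2; ++i) {
        std::memset(dstU, kNeutralUvValue, (width + 1) / 2);
        std::memset(dstV, kNeutralUvValue, (width + 1) / 2);
        dstU += uvStride;
        dstV += uvStride;
    }
}

int main() {
    const size_t width = 176, height = 144, uvStride = 96;  // illustrative
    std::vector<uint8_t> u(uvStride * ((height + 1) / 2));
    std::vector<uint8_t> v(uvStride * ((height + 1) / 2));
    fillNeutralChroma(u.data(), v.data(), uvStride, width, height);
    return 0;
}
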
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7e9090f..7486d27 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -635,7 +635,8 @@
}
work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
work->worklets.front()->output.buffers.clear();
- std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
0u /* stream id */, C2Config::SYNC_FRAME));
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 6deafda..8bf4b72 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -361,9 +361,8 @@
C2WriteView wView = block->map().get();
int16_t* outBuffer = reinterpret_cast<int16_t*>(wView.data());
memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos);
- mOutputDrainBufferWritePos = 0;
- auto fillWork = [buffer = createLinearBuffer(block)](
+ auto fillWork = [buffer = createLinearBuffer(block, 0, mOutputDrainBufferWritePos)](
const std::unique_ptr<C2Work>& work) {
uint32_t flags = 0;
if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -376,6 +375,9 @@
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
};
+
+ mOutputDrainBufferWritePos = 0;
+
if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) {
fillWork(work);
} else {
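The C2SoftXaacDec change works because a lambda's init-capture is evaluated when the closure object is created, so the captured buffer is built from mOutputDrainBufferWritePos before the member is reset a few lines later. A tiny example of that evaluation order:

#include <cstdio>

int main() {
    int writePos = 42;
    // The init-capture runs here, when the closure object is constructed,
    // so it records 42 even though writePos is cleared on the next line.
    auto fillWork = [captured = writePos] {
        std::printf("captured=%d\n", captured);
    };
    writePos = 0;
    fillWork();  // prints captured=42
    return 0;
}
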
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index a5d6fbf..abe343b 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -898,6 +898,12 @@
* Obtains a linear writeable block of given |capacity| and |usage|. If successful, the
* block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested block.
* \param usage the memory usage info for the requested block. Returned blocks will be
* optimized for this usage, but may be used with any usage. One exception:
@@ -926,6 +932,12 @@
* Obtains a circular writeable block of given |capacity| and |usage|. If successful, the
* block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested circular block. (note: the size of the obtained
* block could be slightly larger, e.g. to accommodate any system-required
* alignment)
@@ -956,6 +968,12 @@
* Obtains a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
* the block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity (width and height) than requested. In
+ * this case the larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity (width and height).
+ *
* \param width the width of requested block (the obtained block could be slightly larger, e.g.
* to accommodate any system-required alignment)
* \param height the height of requested block (the obtained block could be slightly larger,
@@ -1000,6 +1018,12 @@
* fence is signalled when the temporary restriction on fetch is lifted.
* e.g. more memory is available to fetch because some memory or prior blocks were released.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested block.
* \param usage the memory usage info for the requested block. Returned blocks will be
* optimized for this usage, but may be used with any usage. One exception:
@@ -1039,6 +1063,12 @@
* fence is signalled when the temporary restriction on fetch is lifted.
* e.g. more memory is available to fetch because some memory or prior blocks were released.
*
+ * \note The returned buffer may have a larger capacity (width and height) than requested. In
+ * this case the larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity (width and height).
+ *
* \param width the width of requested block (the obtained block could be slightly larger, e.g.
* to accommodate any system-required alignment)
* \param height the height of requested block (the obtained block could be slightly larger,
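These notes explain why the component changes above now pass an explicit offset and size when wrapping blocks into buffers: fetchLinearBlock may hand back a block whose capacity exceeds the request, so the C2Buffer must cover only the range actually written. A hedged caller-side sketch, assuming the Codec2 headers (C2Buffer.h) and a working block pool:

// Sketch only; 'pool' is assumed to be a valid std::shared_ptr<C2BlockPool>.
std::shared_ptr<C2Buffer> makeBuffer(const std::shared_ptr<C2BlockPool>& pool,
                                     uint32_t bytesNeeded) {
    C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
    std::shared_ptr<C2LinearBlock> block;
    if (pool->fetchLinearBlock(bytesNeeded, usage, &block) != C2_OK || !block) {
        return nullptr;
    }
    // block->capacity() may be larger than bytesNeeded; write exactly
    // bytesNeeded bytes through block->map(), then share only that range.
    return C2Buffer::CreateLinearBuffer(
            block->share(0 /* offset */, bytesNeeded, ::C2Fence()));
}
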
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 9d9ed70..bdf2027 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -572,7 +572,6 @@
PROFILE_MPEGH_HIGH, ///< MPEG-H High
PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
-
};
enum C2Config::level_t : uint32_t {
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 58a568e..abd8b2d 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -36,13 +36,13 @@
using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
-struct CompToURL {
+struct CompToFiles {
std::string mime;
- std::string mURL;
- std::string info;
+ std::string inputFile;
+ std::string infoFile;
};
-std::vector<CompToURL> gCompToURL = {
+std::vector<CompToFiles> gCompToFiles = {
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
"bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
@@ -110,6 +110,15 @@
mTimestampUs = 0u;
mWorkResult = C2_OK;
mTimestampDevTest = false;
+
+ bool valid = getFileNames(mStreamIndex);
+ if (!valid) {
+ GTEST_SKIP() << "No test file for mime " << mMime << " index: " << mStreamIndex;
+ }
+ ALOGV("mStreamIndex : %zu", mStreamIndex);
+ ALOGV("mInputFile : %s", mInputFile.c_str());
+ ALOGV("mInfoFile : %s", mInfoFile.c_str());
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -126,7 +135,7 @@
virtual void validateTimestampList(int32_t* bitStreamInfo);
- void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+ bool getFileNames(size_t streamIndex = 0);
struct outputMetaData {
uint64_t timestampUs;
@@ -193,6 +202,10 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+ std::string mInfoFile;
+ size_t mStreamIndex = 0;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -204,6 +217,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -285,18 +299,20 @@
}
// LookUpTable of clips and metadata for component testing
-void Codec2AudioDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
+bool Codec2AudioDecHidlTestBase::getFileNames(size_t streamIndex) {
int streamCount = 0;
- for (size_t i = 0; i < gCompToURL.size(); ++i) {
- if (mMime.find(gCompToURL[i].mime) != std::string::npos) {
+
+ for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+ if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
if (streamCount == streamIndex) {
- strcat(mURL, gCompToURL[i].mURL.c_str());
- strcat(info, gCompToURL[i].info.c_str());
- return;
+ mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+ mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+ return true;
}
streamCount++;
}
}
+ return false;
}
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -429,6 +445,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = std::get<2>(GetParam());
}
};
@@ -436,22 +453,12 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::get<2>(GetParam());
bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info, streamIndex);
- if (!strcmp(mURL, sResourceDir.c_str())) {
- ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
- return;
- }
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
// Reset total no of frames received
mFramesReceived = 0;
@@ -468,9 +475,8 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -507,15 +513,10 @@
description("Test Request for thumbnail");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
int32_t bitStreamInfo[2] = {0};
if (mMime.find("raw") != std::string::npos) {
@@ -529,7 +530,6 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
// request EOS for thumbnail
// signal EOS flag with last frame
@@ -542,7 +542,7 @@
} while (!(flags & SYNC_FRAME));
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -599,15 +599,10 @@
TEST_P(Codec2AudioDecHidlTest, FlushTest) {
description("Tests Flush calls");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
int32_t bitStreamInfo[2] = {0};
if (mMime.find("raw") != std::string::npos) {
@@ -629,9 +624,8 @@
verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// Decode 30 frames and flush.
uint32_t numFramesFlushed = FLUSH_INTERVAL;
@@ -684,15 +678,10 @@
description("Decode with multiple empty input frames");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
android::Vector<FrameInfo> Info;
int bytesCount = 0;
uint32_t frameId = 0;
@@ -730,8 +719,7 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -759,6 +747,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -768,19 +757,9 @@
description("Tests codecs for flush at different states");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
- if (!strcmp(mURL, sResourceDir.c_str())) {
- ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
- return;
- }
- ALOGV("mURL : %s", mURL);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
int32_t bitStreamInfo[2] = {0};
@@ -797,7 +776,7 @@
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
bool signalEOS = false;
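The VTS refactor in this file drops the fixed char mURL[512]/info[512] buffers built with strcpy/strcat in favour of std::string members joined from sResourceDir, which removes any chance of overflowing the stack buffers. A trivial sketch of the pattern, with hypothetical paths:

#include <fstream>
#include <iostream>
#include <string>

int main() {
    // Hypothetical resource directory and clip name, for illustration only.
    const std::string resourceDir = "/data/local/tmp/media/";
    const std::string inputFile = resourceDir + "bbb_aac_stereo_128kbps_48000hz.aac";

    std::ifstream eleStream(inputFile, std::ifstream::binary);
    if (!eleStream.is_open()) {
        std::cout << "could not open " << inputFile << "\n";
        return 1;
    }
    return 0;
}
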
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 92b53a0..d77b943 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -84,6 +84,17 @@
mWorkResult = C2_OK;
mOutputSize = 0u;
getInputMaxBufSize();
+
+ c2_status_t status = getChannelCount(&mNumChannels);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported channel count";
+
+ status = getSampleRate(&mSampleRate);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported sample rate";
+
+ status = getSamplesPerFrame(mNumChannels, &mSamplesPerFrame);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported number of samples per frame";
+
+ getFile(mNumChannels, mSampleRate);
}
virtual void TearDown() override {
@@ -97,7 +108,11 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
- void GetURLForComponent(char* mURL, int32_t channelCount, int32_t sampleRate);
+ c2_status_t getChannelCount(int32_t* nChannels);
+ c2_status_t getSampleRate(int32_t* nSampleRate);
+ c2_status_t getSamplesPerFrame(int32_t nChannels, int32_t* samplesPerFrame);
+
+ void getFile(int32_t channelCount, int32_t sampleRate);
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -145,6 +160,12 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ int32_t mNumChannels;
+ int32_t mSampleRate;
+ int32_t mSamplesPerFrame;
+
+ std::string mInputFile;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -222,14 +243,13 @@
return false;
}
-c2_status_t getChannelCount(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nChannels) {
+c2_status_t Codec2AudioEncHidlTestBase::getChannelCount(int32_t* nChannels) {
std::unique_ptr<C2StreamChannelCountInfo::input> channelCount =
std::make_unique<C2StreamChannelCountInfo::input>();
std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
C2FieldSupportedValuesQuery::Current(
C2ParamField(channelCount.get(), &C2StreamChannelCountInfo::value))};
- c2_status_t c2err = component->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
+ c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
if (c2err != C2_OK || validValueInfos.size() != 1u) {
ALOGE("querySupportedValues_vb failed for channelCount");
return c2err;
@@ -264,13 +284,11 @@
}
return C2_OK;
}
-
-c2_status_t getSampleRate(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nSampleRate) {
- // Use the default sample rate for components
+c2_status_t Codec2AudioEncHidlTestBase::getSampleRate(int32_t* nSampleRate) {
+ // Use the default sample rate for the component
std::vector<std::unique_ptr<C2Param>> queried;
- c2_status_t c2err = component->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
- C2_DONT_BLOCK, &queried);
+ c2_status_t c2err = mComponent->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
if (c2err != C2_OK || queried.size() == 0) return c2err;
size_t offset = sizeof(C2Param);
@@ -280,11 +298,11 @@
return C2_OK;
}
-c2_status_t getSamplesPerFrame(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t nChannels, int32_t* samplesPerFrame) {
+c2_status_t Codec2AudioEncHidlTestBase::getSamplesPerFrame(int32_t nChannels,
+ int32_t* samplesPerFrame) {
std::vector<std::unique_ptr<C2Param>> queried;
- c2_status_t c2err = component->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
- C2_DONT_BLOCK, &queried);
+ c2_status_t c2err = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
if (c2err != C2_OK || queried.size() == 0) return c2err;
size_t offset = sizeof(C2Param);
@@ -295,24 +313,8 @@
return C2_OK;
}
-// Get config params for a component
-bool getConfigParams(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nChannels, int32_t* nSampleRate, int32_t* samplesPerFrame) {
- c2_status_t status = getChannelCount(component, nChannels);
- if (status != C2_OK) return false;
-
- status = getSampleRate(component, nSampleRate);
- if (status != C2_OK) return false;
-
- status = getSamplesPerFrame(component, *nChannels, samplesPerFrame);
- if (status != C2_OK) return false;
-
- return true;
-}
-
// LookUpTable of clips and metadata for component testing
-void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL, int32_t channelCount,
- int32_t sampleRate) {
+void Codec2AudioEncHidlTestBase::getFile(int32_t channelCount, int32_t sampleRate) {
std::string rawInput = "bbb_raw_1ch_8khz_s16le.raw";
if (channelCount == 1 && sampleRate == 16000) {
rawInput = "bbb_raw_1ch_16khz_s16le.raw";
@@ -320,7 +322,7 @@
rawInput = "bbb_raw_2ch_48khz_s16le.raw";
}
- strcat(mURL, rawInput.c_str());
+ mInputFile = sResourceDir + rawInput;
}
void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -440,38 +442,23 @@
bool signalEOS = std::get<2>(GetParam());
// Ratio w.r.t to mInputMaxBufSize
int32_t inputMaxBufRatio = std::get<3>(GetParam());
+ mSamplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (mNumChannels * 2));
- int32_t nChannels;
- int32_t nSampleRate;
- int32_t samplesPerFrame;
+ ALOGV("signalEOS %d mInputMaxBufSize %d mSamplesPerFrame %d", signalEOS, mInputMaxBufSize,
+ mSamplesPerFrame);
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
-
- samplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (nChannels * 2));
- ALOGV("signalEOS %d mInputMaxBufSize %d samplesPerFrame %d", signalEOS, mInputMaxBufSize,
- samplesPerFrame);
-
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
+ ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+ << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+ << mSampleRate;
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFrames = 16;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
- ALOGV("mURL : %s", mURL);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(
mComponent, mQueueLock, mQueueCondition, mWorkQueue, mFlushedIndices, mLinearPool,
- eleStream, numFrames, samplesPerFrame, nChannels, nSampleRate, false, signalEOS));
+ eleStream, numFrames, mSamplesPerFrame, mNumChannels, mSampleRate, false, signalEOS));
// If EOS is not sent, sending empty input with EOS flag
if (!signalEOS) {
@@ -545,30 +532,17 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
mFlushedIndices.clear();
- int32_t nChannels;
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
-
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
+ ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+ << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+ << mSampleRate;
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFramesFlushed = 30;
uint32_t numFrames = 128;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -577,10 +551,9 @@
ASSERT_NO_FATAL_FAILURE(
verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
- ALOGV("mURL : %s", mURL);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFramesFlushed,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, mNumChannels, mSampleRate));
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
@@ -590,8 +563,8 @@
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream,
- numFrames - numFramesFlushed, samplesPerFrame, nChannels,
- nSampleRate, true));
+ numFrames - numFramesFlushed, mSamplesPerFrame,
+ mNumChannels, mSampleRate, true));
eleStream.close();
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
@@ -609,33 +582,20 @@
description("Encodes input file for different channel count");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- int32_t nChannels;
int32_t numFrames = 16;
int32_t maxChannelCount = 8;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
uint64_t prevOutputSize = 0u;
uint32_t prevChannelCount = 0u;
// Looping through the maximum number of channel count supported by encoder
- for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
+ for (int32_t nChannels = 1; nChannels < maxChannelCount; nChannels++) {
ALOGV("Configuring encoder %s for channel count = %d", mComponentName.c_str(), nChannels);
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ if (!setupConfigParam(mComponent, nChannels, mSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -656,9 +616,9 @@
// To check if the input stream is sufficient to encode for the higher channel count
struct stat buf;
- stat(mURL, &buf);
+ stat(mInputFile.c_str(), &buf);
size_t fileSize = buf.st_size;
- int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ int32_t bytesCount = (mSamplesPerFrame * nChannels * 2) * numFrames;
if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for ChannelCount " << nChannels
<< " because of insufficient input data\n";
@@ -669,7 +629,7 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFrames,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, nChannels, mSampleRate));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when config params is not proper but config succeeded
@@ -711,24 +671,11 @@
description("Encodes input file for different SampleRate");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- int32_t nChannels;
int32_t numFrames = 16;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
int32_t sampleRateValues[] = {1000, 8000, 16000, 24000, 48000, 96000, 192000};
@@ -737,7 +684,7 @@
for (int32_t nSampleRate : sampleRateValues) {
ALOGV("Configuring encoder %s for SampleRate = %d", mComponentName.c_str(), nSampleRate);
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ if (!setupConfigParam(mComponent, mNumChannels, nSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -759,9 +706,9 @@
// To check if the input stream is sufficient to encode for the higher SampleRate
struct stat buf;
- stat(mURL, &buf);
+ stat(mInputFile.c_str(), &buf);
size_t fileSize = buf.st_size;
- int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ int32_t bytesCount = (mSamplesPerFrame * mNumChannels * 2) * numFrames;
if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for SampleRate " << nSampleRate
<< " because of insufficient input data\n";
@@ -772,7 +719,7 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFrames,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, mNumChannels, nSampleRate));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when config params is not proper but config succeeded
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 8d917b3..c331d0b 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -43,13 +43,13 @@
using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
-struct CompToURL {
+struct CompToFiles {
std::string mime;
- std::string mURL;
- std::string info;
- std::string chksum;
+ std::string inputFile;
+ std::string infoFile;
+ std::string chksumFile;
};
-std::vector<CompToURL> gCompToURL = {
+std::vector<CompToFiles> gCompToFiles = {
{"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
"bbb_avc_176x144_300kbps_60fps_chksum.md5"},
{"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
@@ -92,8 +92,8 @@
// google.codec2 Video test setup
virtual void SetUp() override {
getParams();
+
mDisableTest = false;
- ALOGV("Codec2VideoDecHidlTest SetUp");
mClient = android::Codec2Client::CreateFromService(
mInstanceName.c_str(),
!bool(android::Codec2Client::CreateFromService("default", true)));
@@ -135,6 +135,15 @@
mDisableTest = true;
}
+ bool valid = getFileNames(mStreamIndex);
+ if (!valid) {
+ GTEST_SKIP() << "No test file for mime " << mMime << " index: " << mStreamIndex;
+ }
+ ALOGV("mStreamIndex : %zu", mStreamIndex);
+ ALOGV("mInputFile : %s", mInputFile.c_str());
+ ALOGV("mInfoFile : %s", mInfoFile.c_str());
+ ALOGV("mChksumFile : %s", mChksumFile.c_str());
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -149,8 +158,7 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
- void GetURLChksmForComponent(char* mURL, char* info, char* chksum, size_t streamIndex);
- void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+ bool getFileNames(size_t streamIndex = 0);
/* Calculate the CKSUM for the data in inbuf */
void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
@@ -311,6 +319,11 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+ std::string mInfoFile;
+ std::string mChksumFile;
+ size_t mStreamIndex = 0;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -322,6 +335,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -358,27 +372,24 @@
// number of elementary streams per component
#define STREAM_COUNT 3
-// LookUpTable of clips, metadata and chksum for component testing
-void Codec2VideoDecHidlTestBase::GetURLChksmForComponent(char* mURL, char* info, char* chksum,
- size_t streamIndex) {
+// number of elementary streams required for adaptive testing
+#define ADAPTIVE_STREAM_COUNT 2
+// LookUpTable of clips, metadata and checksum files for component testing
+bool Codec2VideoDecHidlTestBase::getFileNames(size_t streamIndex) {
int streamCount = 0;
- for (size_t i = 0; i < gCompToURL.size(); ++i) {
- if (mMime.find(gCompToURL[i].mime) != std::string::npos) {
+
+ for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+ if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
if (streamCount == streamIndex) {
- strcat(mURL, gCompToURL[i].mURL.c_str());
- strcat(info, gCompToURL[i].info.c_str());
- strcat(chksum, gCompToURL[i].chksum.c_str());
- return;
+ mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+ mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+ mChksumFile = sResourceDir + gCompToFiles[i].chksumFile;
+ return true;
}
streamCount++;
}
}
-}
-
-void Codec2VideoDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
- char chksum[512];
- strcpy(chksum, sResourceDir.c_str());
- GetURLChksmForComponent(mURL, info, chksum, streamIndex);
+ return false;
}
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -529,6 +540,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = std::get<2>(GetParam());
}
};
@@ -537,24 +549,13 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::get<2>(GetParam());
bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
- char mURL[512], info[512], chksum[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- strcpy(chksum, sResourceDir.c_str());
-
- GetURLChksmForComponent(mURL, info, chksum, streamIndex);
- if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
- ALOGV("Skipping Test, Stream not available");
- return;
- }
mMd5Enable = true;
- if (!strcmp(chksum, sResourceDir.c_str())) mMd5Enable = false;
+ if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
if (!configPixelFormat(format)) {
@@ -565,23 +566,22 @@
mFlushedIndices.clear();
mTimestampUslist.clear();
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
ASSERT_EQ(mComponent->start(), C2_OK);
// Reset total no of frames received
mFramesReceived = 0;
mTimestampUs = 0;
- ALOGV("mURL : %s", mURL);
+
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
size_t refChksmSize = 0;
std::ifstream refChksum;
if (mMd5Enable) {
- ALOGV("chksum file name: %s", chksum);
- refChksum.open(chksum, std::ifstream::binary | std::ifstream::ate);
+ refChksum.open(mChksumFile, std::ifstream::binary | std::ifstream::ate);
ASSERT_EQ(refChksum.is_open(), true);
refChksmSize = refChksum.tellg();
refChksum.seekg(0, std::ifstream::beg);
@@ -650,20 +650,17 @@
uint32_t timestampOffset = 0;
uint32_t offset = 0;
android::Vector<FrameInfo> Info;
- for (uint32_t i = 0; i < STREAM_COUNT * 2; i++) {
- char mURL[512], info[512];
+ for (uint32_t i = 0; i < ADAPTIVE_STREAM_COUNT * 2; i++) {
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info, i % STREAM_COUNT);
- if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
+ bool valid = getFileNames(i % ADAPTIVE_STREAM_COUNT);
+ if (!valid) {
ALOGV("Stream not available, skipping this index");
continue;
}
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
int bytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
@@ -690,8 +687,7 @@
// Reset Total frames before second decode loop
// mFramesReceived = 0;
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info,
@@ -747,15 +743,9 @@
description("Test Request for thumbnail");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
-
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
uint32_t flags = 0;
for (size_t i = 0; i < MAX_ITERATIONS; i++) {
@@ -772,7 +762,7 @@
} while (!(flags & SYNC_FRAME));
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -834,19 +824,12 @@
ASSERT_EQ(mComponent->start(), C2_OK);
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
mFlushedIndices.clear();
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
-
- ALOGV("mURL : %s", mURL);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -857,7 +840,7 @@
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// Decode 30 frames and flush. here 30 is chosen to ensure there is a key
// frame after this so that the below section can be covered for all
@@ -910,15 +893,10 @@
description("Decode with multiple empty input frames");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
android::Vector<FrameInfo> Info;
int bytesCount = 0;
uint32_t frameId = 0;
@@ -946,8 +924,7 @@
eleInfo.close();
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -973,6 +950,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -982,22 +960,15 @@
description("Tests codecs for flush at different states");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
-
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
bool flushedDecoder = false;
bool signalEOS = false;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index dfd649d..6a00edd 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -88,6 +88,17 @@
mTimestampUs = 0u;
mOutputSize = 0u;
mTimestampDevTest = false;
+ mWidth = ENC_DEFAULT_FRAME_WIDTH;
+ mHeight = ENC_DEFAULT_FRAME_HEIGHT;
+ mMaxWidth = 0;
+ mMaxHeight = 0;
+ mMinWidth = INT32_MAX;
+ mMinHeight = INT32_MAX;
+
+ ASSERT_EQ(getMaxMinResolutionSupported(), C2_OK);
+ mWidth = std::max(std::min(mWidth, mMaxWidth), mMinWidth);
+ mHeight = std::max(std::min(mHeight, mMaxHeight), mMinHeight);
+ ALOGV("mWidth %d mHeight %d", mWidth, mHeight);
C2SecureModeTuning secureModeTuning{};
mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
@@ -96,6 +107,7 @@
mDisableTest = true;
}
+ getFile();
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -109,8 +121,9 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
-
+ void getFile();
bool setupConfigParam(int32_t nWidth, int32_t nHeight, int32_t nBFrame = 0);
+ c2_status_t getMaxMinResolutionSupported();
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -181,6 +194,12 @@
uint32_t mFailedWorkReceived;
uint64_t mTimestampUs;
uint64_t mOutputSize;
+ int32_t mWidth;
+ int32_t mHeight;
+ int32_t mMaxWidth;
+ int32_t mMaxHeight;
+ int32_t mMinWidth;
+ int32_t mMinHeight;
std::list<uint64_t> mTimestampUslist;
std::list<uint64_t> mFlushedIndices;
@@ -197,6 +216,8 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -266,9 +287,39 @@
return true;
}
-// LookUpTable of clips for component testing
-void GetURLForComponent(char* URL) {
- strcat(URL, "bbb_352x288_420p_30fps_32frames.yuv");
+void Codec2VideoEncHidlTestBase::getFile() {
+ mInputFile = sResourceDir + "bbb_352x288_420p_30fps_32frames.yuv";
+}
+
+void fillByteBuffer(char* inputBuffer, char* mInputData, uint32_t nWidth, int32_t nHeight) {
+ int width, height, tileWidth, tileHeight;
+ int offset = 0, frmOffset = 0;
+ int numOfPlanes = 3;
+ for (int plane = 0; plane < numOfPlanes; plane++) {
+ if (plane == 0) {
+ width = nWidth;
+ height = nHeight;
+ tileWidth = ENC_DEFAULT_FRAME_WIDTH;
+ tileHeight = ENC_DEFAULT_FRAME_HEIGHT;
+ } else {
+ width = nWidth / 2;
+ tileWidth = ENC_DEFAULT_FRAME_WIDTH / 2;
+ height = nHeight / 2;
+ tileHeight = ENC_DEFAULT_FRAME_HEIGHT / 2;
+ }
+ for (int k = 0; k < height; k += tileHeight) {
+ int rowsToCopy = std::min(height - k, tileHeight);
+ for (int j = 0; j < rowsToCopy; j++) {
+ for (int i = 0; i < width; i += tileWidth) {
+ int colsToCopy = std::min(width - i, tileWidth);
+ memcpy(inputBuffer + (offset + (k + j) * width + i),
+ mInputData + (frmOffset + j * tileWidth), colsToCopy);
+ }
+ }
+ }
+ offset += width * height;
+ frmOffset += tileWidth * tileHeight;
+ }
}
void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -314,12 +365,22 @@
ULock l(queueLock);
flushedIndices.emplace_back(frameID);
}
- char* data = (char*)malloc(bytesCount);
- ASSERT_NE(data, nullptr);
- memset(data, 0, bytesCount);
- if (eleStream.is_open()) {
- eleStream.read(data, bytesCount);
- ASSERT_EQ(eleStream.gcount(), bytesCount);
+ std::vector<uint8_t> buffer(bytesCount);
+ char* data = (char*)buffer.data();
+ if (nWidth != ENC_DEFAULT_FRAME_WIDTH || nHeight != ENC_DEFAULT_FRAME_HEIGHT) {
+ int defaultBytesCount = ENC_DEFAULT_FRAME_HEIGHT * ENC_DEFAULT_FRAME_WIDTH * 3 >> 1;
+ std::vector<uint8_t> srcBuffer(defaultBytesCount);
+ char* srcData = (char*)srcBuffer.data();
+ if (eleStream.is_open()) {
+ eleStream.read(srcData, defaultBytesCount);
+ ASSERT_EQ(eleStream.gcount(), defaultBytesCount);
+ }
+ fillByteBuffer(data, srcData, nWidth, nHeight);
+ } else {
+ if (eleStream.is_open()) {
+ eleStream.read(data, bytesCount);
+ ASSERT_EQ(eleStream.gcount(), bytesCount);
+ }
}
std::shared_ptr<C2GraphicBlock> block;
err = graphicPool->fetchGraphicBlock(nWidth, nHeight, HAL_PIXEL_FORMAT_YV12,
@@ -352,7 +413,6 @@
work->input.buffers.emplace_back(new GraphicBuffer(block));
work->worklets.clear();
work->worklets.emplace_back(new C2Worklet);
- free(data);
std::list<std::unique_ptr<C2Work>> items;
items.push_back(std::move(work));
@@ -381,25 +441,65 @@
}
};
+c2_status_t Codec2VideoEncHidlTestBase::getMaxMinResolutionSupported() {
+ std::unique_ptr<C2StreamPictureSizeInfo::input> param =
+ std::make_unique<C2StreamPictureSizeInfo::input>();
+ std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
+ C2FieldSupportedValuesQuery::Current(
+ C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
+ C2FieldSupportedValuesQuery::Current(
+ C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
+ c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_MAY_BLOCK);
+ if (c2err != C2_OK || validValueInfos.size() != 2u) {
+ ALOGE("querySupportedValues_vb failed for pictureSize");
+ return c2err;
+ }
+
+ const auto& c2FSVWidth = validValueInfos[0].values;
+ const auto& c2FSVHeight = validValueInfos[1].values;
+ switch (c2FSVWidth.type) {
+ case C2FieldSupportedValues::type_t::RANGE: {
+ const auto& widthRange = c2FSVWidth.range;
+ const auto& heightRange = c2FSVHeight.range;
+ mMaxWidth = (uint32_t)(widthRange.max).ref<uint32_t>();
+ mMaxHeight = (uint32_t)(heightRange.max).ref<uint32_t>();
+ mMinWidth = (uint32_t)(widthRange.min).ref<uint32_t>();
+ mMinHeight = (uint32_t)(heightRange.min).ref<uint32_t>();
+ break;
+ }
+ case C2FieldSupportedValues::type_t::VALUES: {
+ int32_t curr = 0;
+ for (const C2Value::Primitive& prim : c2FSVWidth.values) {
+ curr = (uint32_t)prim.ref<uint32_t>();
+ mMaxWidth = std::max(curr, mMaxWidth);
+ mMinWidth = std::min(curr, mMinWidth);
+ }
+ for (const C2Value::Primitive& prim : c2FSVHeight.values) {
+ curr = (uint32_t)prim.ref<uint32_t>();
+ mMaxHeight = std::max(curr, mMaxHeight);
+ mMinHeight = std::min(curr, mMinHeight);
+ }
+ break;
+ }
+ default:
+ ALOGE("Non supported data");
+ return C2_BAD_VALUE;
+ }
+ return C2_OK;
+}
+
TEST_P(Codec2VideoEncEncodeTest, EncodeTest) {
description("Encodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
- int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
bool signalEOS = std::get<3>(GetParam());
// Send an empty frame to receive CSD data from encoder.
bool sendEmptyFirstFrame = std::get<3>(GetParam());
mConfigBPictures = std::get<4>(GetParam());
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
mTimestampUs = 0;
mTimestampDevTest = true;
@@ -415,10 +515,6 @@
inputFrames--;
}
- if (!setupConfigParam(nWidth, nHeight, mConfigBPictures ? 1 : 0)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
std::vector<std::unique_ptr<C2Param>> inParams;
c2_status_t c2_status = mComponent->query({}, {C2StreamGopTuning::output::PARAM_TYPE},
C2_DONT_BLOCK, &inParams);
@@ -438,6 +534,9 @@
mConfigBPictures = false;
}
}
+ if (!setupConfigParam(mWidth, mHeight, mConfigBPictures ? 1 : 0)) {
+ ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
+ }
ASSERT_EQ(mComponent->start(), C2_OK);
@@ -447,7 +546,7 @@
}
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream, mDisableTest,
- inputFrames, ENC_NUM_FRAMES, nWidth, nHeight, false,
+ inputFrames, ENC_NUM_FRAMES, mWidth, mHeight, false,
signalEOS));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when resolution is not proper but config succeeded
@@ -537,15 +636,8 @@
description("Test Request for flush");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
- int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
- int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
- if (!setupConfigParam(nWidth, nHeight)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
+ if (!setupConfigParam(mWidth, mHeight)) {
+ ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
}
ASSERT_EQ(mComponent->start(), C2_OK);
@@ -554,9 +646,9 @@
std::ifstream eleStream;
uint32_t numFramesFlushed = 10;
uint32_t numFrames = ENC_NUM_FRAMES;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
- ALOGV("mURL : %s", mURL);
+
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
@@ -567,7 +659,7 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream, mDisableTest, 0,
- numFramesFlushed, nWidth, nHeight, false, false));
+ numFramesFlushed, mWidth, mHeight, false, false));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when resolution is not proper but config succeeded
// In this cases, we skip encoding the input stream
@@ -587,8 +679,8 @@
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream, mDisableTest,
- numFramesFlushed, numFrames - numFramesFlushed, nWidth,
- nHeight, true));
+ numFramesFlushed, numFrames - numFramesFlushed, mWidth,
+ mHeight, true));
eleStream.close();
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when resolution is not proper but config succeeded
@@ -719,23 +811,14 @@
description("Encodes input file for different bitrates");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
-
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
mFlushedIndices.clear();
- int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
- int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
- if (!setupConfigParam(nWidth, nHeight)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
+ if (!setupConfigParam(mWidth, mHeight)) {
+ ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
}
ASSERT_EQ(mComponent->start(), C2_OK);
@@ -756,8 +839,8 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mGraphicPool, eleStream,
- mDisableTest, inputFrameId, ENC_NUM_FRAMES, nWidth,
- nHeight, false, false));
+ mDisableTest, inputFrameId, ENC_NUM_FRAMES, mWidth,
+ mHeight, false, false));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when resolution is not proper but config succeeded
// In this cases, we skip encoding the input stream
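
The encoder test now clamps mWidth/mHeight to the component's supported range but still reads the fixed 352x288 YUV clip, so fillByteBuffer() tiles each plane of the default frame across the target plane (and implicitly crops it when the target is smaller). A simplified single-plane sketch of that tiling, with illustrative sizes only:

#include <algorithm>
#include <cstdint>
#include <cstring>

// Repeats the srcW x srcH source plane across the dstW x dstH destination,
// copying row fragments the same way fillByteBuffer() above does per plane.
static void tilePlane(uint8_t* dst, int dstW, int dstH,
                      const uint8_t* src, int srcW, int srcH) {
    for (int y = 0; y < dstH; y += srcH) {
        int rows = std::min(dstH - y, srcH);
        for (int r = 0; r < rows; ++r) {
            for (int x = 0; x < dstW; x += srcW) {
                int cols = std::min(dstW - x, srcW);
                // source row r, starting at column 0, wrapped into each tile slot
                memcpy(dst + (y + r) * dstW + x, src + r * srcW, cols);
            }
        }
    }
}

With srcW = srcH = 4 and dstW = dstH = 6, for example, the 4x4 tile fills the top-left block and its first two rows and columns wrap into the remaining border, which is the same repeat-and-crop behavior the test relies on when the clamped resolution differs from 352x288.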
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index b47e546..d942606 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -515,7 +515,7 @@
}
void AmendOutputFormatWithCodecSpecificData(
- const uint8_t *data, size_t size, const std::string mediaType,
+ const uint8_t *data, size_t size, const std::string &mediaType,
const sp<AMessage> &outputFormat) {
if (mediaType == MIMETYPE_VIDEO_AVC) {
// Codec specific data should be SPS and PPS in a single buffer,
@@ -1479,13 +1479,11 @@
status_t err;
sp<IGraphicBufferProducer> bufferProducer;
- sp<AMessage> inputFormat;
sp<AMessage> outputFormat;
uint64_t usage = 0;
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
- inputFormat = config->mInputFormat;
outputFormat = config->mOutputFormat;
usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
@@ -1521,6 +1519,14 @@
return;
}
+ // Formats can change after setupInputSurface
+ sp<AMessage> inputFormat;
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ inputFormat = config->mInputFormat;
+ outputFormat = config->mOutputFormat;
+ }
mCallback->onInputSurfaceCreated(
inputFormat,
outputFormat,
@@ -1570,13 +1576,11 @@
}
void CCodec::setInputSurface(const sp<PersistentSurface> &surface) {
- sp<AMessage> inputFormat;
sp<AMessage> outputFormat;
uint64_t usage = 0;
{
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
- inputFormat = config->mInputFormat;
outputFormat = config->mOutputFormat;
usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
@@ -1608,6 +1612,14 @@
mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
return;
}
+ // Formats can change after setupInputSurface
+ sp<AMessage> inputFormat;
+ {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ inputFormat = config->mInputFormat;
+ outputFormat = config->mOutputFormat;
+ }
mCallback->onInputSurfaceAccepted(inputFormat, outputFormat);
}
@@ -2264,7 +2276,12 @@
}
}
if (config->mInputSurface) {
- config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+ if (work->worklets.empty()
+ || !work->worklets.back()
+ || (work->worklets.back()->output.flags
+ & C2FrameData::FLAG_INCOMPLETE) == 0) {
+ config->mInputSurface->onInputBufferDone(work->input.ordinal.frameIndex);
+ }
}
if (initDataWatcher.hasChanged()) {
initData = initDataWatcher.update();
@@ -2639,7 +2656,11 @@
*maxUsage = 0;
continue;
}
- *minUsage |= supported.values[0].u64;
+ if (supported.values.size() > 1) {
+ *minUsage |= supported.values[1].u64;
+ } else {
+ *minUsage |= supported.values[0].u64;
+ }
int64_t currentMaxUsage = 0;
for (const C2Value::Primitive &flags : supported.values) {
currentMaxUsage |= flags.u64;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 727b1ff..27e87e6 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1188,11 +1188,14 @@
bool changed = false;
if (domain & mInputDomain) {
- sp<AMessage> oldFormat = mInputFormat->dup();
+ sp<AMessage> oldFormat = mInputFormat;
+ mInputFormat = mInputFormat->dup(); // trigger format changed
mInputFormat->extend(getFormatForDomain(reflected, mInputDomain));
if (mInputFormat->countEntries() != oldFormat->countEntries()
|| mInputFormat->changesFrom(oldFormat)->countEntries() > 0) {
changed = true;
+ } else {
+ mInputFormat = oldFormat; // no change
}
}
if (domain & mOutputDomain) {
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index fc4ee51..8fc2ef5 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -679,17 +679,20 @@
std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
#ifdef __LP64__
static std::once_flag s_checkOnce;
- static bool s_64bitonly {false};
+ static bool s_is64bitOk {true};
std::call_once(s_checkOnce, [&](){
const std::string abi32list =
::android::base::GetProperty("ro.product.cpu.abilist32", "");
- if (abi32list.empty()) {
- s_64bitonly = true;
+ if (!abi32list.empty()) {
+ int32_t inputSurfaceSetting =
+ ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
+ s_is64bitOk = inputSurfaceSetting != 0;
}
});
- if (!s_64bitonly) {
- ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object");
+ if (!s_is64bitOk) {
+ ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object"\
+ "when debug.stagefright.c2inputsurface is set to 0");
return nullptr;
}
#endif
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 3ddae01..50d600c 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -61,24 +61,24 @@
/// Input Surface configuration
struct Config {
// IN PARAMS (GBS)
- float mMinFps; // minimum fps (repeat frame to achieve this)
- float mMaxFps; // max fps (via frame drop)
- float mCaptureFps; // capture fps
- float mCodedFps; // coded fps
- bool mSuspended; // suspended
- int64_t mTimeOffsetUs; // time offset (input => codec)
- int64_t mSuspendAtUs; // suspend/resume time
- int64_t mStartAtUs; // start time
- bool mStopped; // stopped
- int64_t mStopAtUs; // stop time
+ float mMinFps = 0.0; // minimum fps (repeat frame to achieve this)
+ float mMaxFps = 0.0; // max fps (via frame drop)
+ float mCaptureFps = 0.0; // capture fps
+ float mCodedFps = 0.0; // coded fps
+ bool mSuspended = false; // suspended
+ int64_t mTimeOffsetUs = 0; // time offset (input => codec)
+ int64_t mSuspendAtUs = 0; // suspend/resume time
+ int64_t mStartAtUs = 0; // start time
+ bool mStopped = false; // stopped
+ int64_t mStopAtUs = 0; // stop time
// OUT PARAMS (GBS)
- int64_t mInputDelayUs; // delay between encoder input and surface input
+ int64_t mInputDelayUs = 0; // delay between encoder input and surface input
// IN PARAMS (CODEC WRAPPER)
- float mFixedAdjustedFps; // fixed fps via PTS manipulation
- float mMinAdjustedFps; // minimum fps via PTS manipulation
- uint64_t mUsage; // consumer usage
+ float mFixedAdjustedFps = 0.0; // fixed fps via PTS manipulation
+ float mMinAdjustedFps = 0.0; // minimum fps via PTS manipulation
+ uint64_t mUsage = 0; // consumer usage
};
/**
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 0966988..5f87c66 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -507,9 +507,21 @@
};
}
+// Matrix coefficients to convert RGB to planar YUV data.
+// Each sub-array holds the 3x3 coefficients applied to R, G and B.
+static const int16_t bt601Matrix[2][3][3] = {
+ { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+ { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } }, /* RANGE_LIMITED */
+};
+
+static const int16_t bt709Matrix[2][3][3] = {
+ { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+ { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
+};
+
status_t ConvertRGBToPlanarYUV(
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
- const C2GraphicView &src) {
+ const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
CHECK(dstY != nullptr);
CHECK((src.width() & 1) == 0);
CHECK((src.height() & 1) == 0);
@@ -527,28 +539,38 @@
const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
const uint8_t *pBlue = src.data()[C2PlanarLayout::PLANE_B];
-#define CLIP3(x,y,z) (((z) < (x)) ? (x) : (((z) > (y)) ? (y) : (z)))
+ // set default range as limited
+ if (colorRange != C2Color::RANGE_FULL && colorRange != C2Color::RANGE_LIMITED) {
+ colorRange = C2Color::RANGE_LIMITED;
+ }
+ const int16_t (*weights)[3] =
+ (colorMatrix == C2Color::MATRIX_BT709) ?
+ bt709Matrix[colorRange - 1] : bt601Matrix[colorRange - 1];
+ uint8_t zeroLvl = colorRange == C2Color::RANGE_FULL ? 0 : 16;
+ uint8_t maxLvlLuma = colorRange == C2Color::RANGE_FULL ? 255 : 235;
+ uint8_t maxLvlChroma = colorRange == C2Color::RANGE_FULL ? 255 : 240;
+
+#define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
for (size_t y = 0; y < src.height(); ++y) {
for (size_t x = 0; x < src.width(); ++x) {
- uint8_t red = *pRed;
- uint8_t green = *pGreen;
- uint8_t blue = *pBlue;
+ uint8_t r = *pRed;
+ uint8_t g = *pGreen;
+ uint8_t b = *pBlue;
- // using ITU-R BT.601 conversion matrix
- unsigned luma =
- CLIP3(0, (((red * 66 + green * 129 + blue * 25) >> 8) + 16), 255);
+ unsigned luma = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2]) >> 8) +
+ zeroLvl;
- dstY[x] = luma;
+ dstY[x] = CLIP3(zeroLvl, luma, maxLvlLuma);
if ((x & 1) == 0 && (y & 1) == 0) {
- unsigned U =
- CLIP3(0, (((-red * 38 - green * 74 + blue * 112) >> 8) + 128), 255);
+ unsigned U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2]) >> 8) +
+ 128;
- unsigned V =
- CLIP3(0, (((red * 112 - green * 94 - blue * 18) >> 8) + 128), 255);
+ unsigned V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2]) >> 8) +
+ 128;
- dstU[x >> 1] = U;
- dstV[x >> 1] = V;
+ dstU[x >> 1] = CLIP3(zeroLvl, U, maxLvlChroma);
+ dstV[x >> 1] = CLIP3(zeroLvl, V, maxLvlChroma);
}
pRed += layout.planes[C2PlanarLayout::PLANE_R].colInc;
pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
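
With the BT.601 limited-range weights above, the luma path reduces to Y = ((66*R + 129*G + 25*B) >> 8) + 16, clipped to [16, 235]. A small self-contained check of that arithmetic (input values chosen purely for illustration):

#include <algorithm>
#include <cstdint>
#include <cstdio>

// BT.601 limited-range luma, following the shift-and-offset form used above.
static uint8_t rgbToLumaBt601Limited(uint8_t r, uint8_t g, uint8_t b) {
    int y = ((66 * r + 129 * g + 25 * b) >> 8) + 16;  // zeroLvl = 16
    return (uint8_t)std::clamp(y, 16, 235);           // maxLvlLuma = 235
}

int main() {
    // Mid gray: ((66 + 129 + 25) * 128) >> 8 = 110, plus the 16 offset -> 126.
    printf("Y(128,128,128) = %d\n", rgbToLumaBt601Limited(128, 128, 128));
    // White: (220 * 255) >> 8 = 219, plus 16 -> exactly the limited-range ceiling of 235.
    printf("Y(255,255,255) = %d\n", rgbToLumaBt601Limited(255, 255, 255));
    return 0;
}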
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index af29e81..9fa642d 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -18,6 +18,7 @@
#define CODEC2_BUFFER_UTILS_H_
#include <C2Buffer.h>
+#include <C2Config.h>
#include <C2ParamDef.h>
#include <media/hardware/VideoAPI.h>
@@ -39,7 +40,8 @@
*/
status_t ConvertRGBToPlanarYUV(
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
- const C2GraphicView &src);
+ const C2GraphicView &src, C2Color::matrix_t colorMatrix = C2Color::MATRIX_BT601,
+ C2Color::range_t colorRange = C2Color::RANGE_LIMITED);
/**
* Returns a planar YUV 420 8-bit media image descriptor.
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 6340cba..8cfa1d7 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -178,6 +178,8 @@
return C2_CORRUPTED;
}
+ // Note: the BLOB allocator does not support padding as this functionality is expected
+ // to be provided by the gralloc implementation.
std::shared_ptr<C2GraphicAllocation> graphicAllocation;
c2_status_t status = mC2AllocatorGralloc->newGraphicAllocation(
capacity, kLinearBufferHeight, kLinearBufferFormat, usage, &graphicAllocation);
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index a8528df..77b265a 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -417,15 +417,16 @@
buffer = -1;
}
}
- return new Impl(ionFd, allocSize, bufferFd, buffer, id, ret);
-
+ // the padding is not usable so deduct it from the advertised capacity
+ return new Impl(ionFd, allocSize - sPadding, bufferFd, buffer, id, ret);
} else {
ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
"returned (%d) ; bufferFd = %d",
ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
- return new ImplV2(ionFd, allocSize, bufferFd, id, ret);
+ // the padding is not usable so deduct it from the advertised capacity
+ return new ImplV2(ionFd, allocSize - sPadding, bufferFd, id, ret);
}
}
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 6d8552a..1aa3d69 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -111,8 +111,27 @@
virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
// internal methods
- C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name, unsigned flags,
- C2Allocator::id_t id);
+
+ /**
+ * Constructs an allocation via a new allocation.
+ *
+ * @param alloc allocator
+ * @param allocSize size used for the allocator
+ * @param capacity capacity advertised to the client
+ * @param heap_name name of the dmabuf heap (device)
+ * @param flags flags
+ * @param id allocator id
+ */
+ C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+ C2String heap_name, unsigned flags, C2Allocator::id_t id);
+
+ /**
+ * Constructs an allocation by wrapping an existing allocation.
+ *
+ * @param size capacity advertised to the client
+ * @param shareFd dmabuf fd of the wrapped allocation
+ * @param id allocator id
+ */
C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
c2_status_t status() const;
@@ -246,19 +265,19 @@
}
}
-C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name,
- unsigned flags, C2Allocator::id_t id)
- : C2LinearAllocation(size), mHandle(-1, 0) {
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+ C2String heap_name, unsigned flags, C2Allocator::id_t id)
+ : C2LinearAllocation(capacity), mHandle(-1, 0) {
int bufferFd = -1;
int ret = 0;
- bufferFd = alloc.Alloc(heap_name, size, flags);
+ bufferFd = alloc.Alloc(heap_name, allocSize, flags);
if (bufferFd < 0) {
ret = bufferFd;
}
// this may be a non-working handle if bufferFd is negative
- mHandle = C2HandleBuf(bufferFd, size);
+ mHandle = C2HandleBuf(bufferFd, capacity);
mId = id;
mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
}
@@ -381,7 +400,7 @@
size_t allocSize = (size_t)capacity + sPadding;
// TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
- mBufferAllocator, allocSize, heap_name, flags, getId());
+ mBufferAllocator, allocSize, allocSize - sPadding, heap_name, flags, getId());
ret = alloc->status();
if (ret == C2_OK) {
*allocation = alloc;
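
Both the ion and dmabuf linear allocators now allocate capacity + sPadding bytes but advertise only the unpadded capacity through the C2 handle, so a client writing up to the reported capacity can never touch the padding. A trivial sketch of that arithmetic; the padding value below is an assumption for the example, not the allocators' actual sPadding:

#include <cstddef>
#include <cstdio>

static constexpr size_t kPadding = 256;  // assumed stand-in for sPadding

int main() {
    size_t requested = 4096;                  // capacity asked for by the client
    size_t allocSize = requested + kPadding;  // what the allocator actually allocates
    size_t advertised = allocSize - kPadding; // capacity reported back (== requested)
    printf("alloc %zu bytes, advertise %zu\n", allocSize, advertised);
    return 0;
}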
diff --git a/media/libeffects/visualizer/Android.bp b/media/libeffects/visualizer/Android.bp
new file mode 100644
index 0000000..335251a
--- /dev/null
+++ b/media/libeffects/visualizer/Android.bp
@@ -0,0 +1,41 @@
+package {
+ default_applicable_licenses: ["frameworks_av_media_libeffects_visualizer_license"],
+}
+
+// See: http://go/android-license-faq
+license {
+ name: "frameworks_av_media_libeffects_visualizer_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ ],
+ license_text: [
+ "NOTICE",
+ ],
+}
+
+// Visualizer library
+cc_library_shared {
+ name: "libvisualizer",
+ vendor: true,
+ srcs: ["EffectVisualizer.cpp"],
+ cflags: [
+ "-O2",
+ "-fvisibility=hidden",
+ "-Wall",
+ "-Werror",
+ "-DBUILD_FLOAT",
+ "-DSUPPORT_MC",
+ ],
+ shared_libs: [
+ "libcutils",
+ "liblog",
+ "libdl",
+ ],
+ relative_install_path: "soundfx",
+ header_libs: [
+ "libhardware_headers",
+ "libaudioeffects",
+ "libaudioutils_headers",
+ ],
+}
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
deleted file mode 100644
index 68c7cf2..0000000
--- a/media/libeffects/visualizer/Android.mk
+++ /dev/null
@@ -1,31 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-# Visualizer library
-include $(CLEAR_VARS)
-
-LOCAL_VENDOR_MODULE := true
-LOCAL_SRC_FILES:= \
- EffectVisualizer.cpp
-
-LOCAL_CFLAGS+= -O2 -fvisibility=hidden
-LOCAL_CFLAGS += -Wall -Werror
-LOCAL_CFLAGS += -DBUILD_FLOAT -DSUPPORT_MC
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- liblog \
- libdl
-
-LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libvisualizer
-LOCAL_LICENSE_KINDS:= SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS:= notice
-LOCAL_NOTICE_FILE:= $(LOCAL_PATH)/NOTICE
-
-LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-effects) \
- $(call include-path-for, audio-utils)
-
-
-LOCAL_HEADER_LIBRARIES += libhardware_headers
-include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 189fef0..c89c023 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -35,54 +35,6 @@
using media::VolumeShaper;
-enum {
- DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
- SET_DATA_SOURCE_URL,
- SET_DATA_SOURCE_FD,
- SET_DATA_SOURCE_STREAM,
- SET_DATA_SOURCE_CALLBACK,
- SET_DATA_SOURCE_RTP,
- SET_BUFFERING_SETTINGS,
- GET_BUFFERING_SETTINGS,
- PREPARE_ASYNC,
- START,
- STOP,
- IS_PLAYING,
- SET_PLAYBACK_SETTINGS,
- GET_PLAYBACK_SETTINGS,
- SET_SYNC_SETTINGS,
- GET_SYNC_SETTINGS,
- PAUSE,
- SEEK_TO,
- GET_CURRENT_POSITION,
- GET_DURATION,
- RESET,
- NOTIFY_AT,
- SET_AUDIO_STREAM_TYPE,
- SET_LOOPING,
- SET_VOLUME,
- INVOKE,
- SET_METADATA_FILTER,
- GET_METADATA,
- SET_AUX_EFFECT_SEND_LEVEL,
- ATTACH_AUX_EFFECT,
- SET_VIDEO_SURFACETEXTURE,
- SET_PARAMETER,
- GET_PARAMETER,
- SET_RETRANSMIT_ENDPOINT,
- GET_RETRANSMIT_ENDPOINT,
- SET_NEXT_PLAYER,
- APPLY_VOLUME_SHAPER,
- GET_VOLUME_SHAPER_STATE,
- // Modular DRM
- PREPARE_DRM,
- RELEASE_DRM,
- // AudioRouting
- SET_OUTPUT_DEVICE,
- GET_ROUTED_DEVICE_ID,
- ENABLE_AUDIO_DEVICE_CALLBACK,
-};
-
// ModDrm helpers
static status_t readVector(const Parcel& reply, Vector<uint8_t>& vector) {
uint32_t size = 0;
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index 3548a1e..28684d1 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -137,6 +137,56 @@
virtual status_t setOutputDevice(audio_port_handle_t deviceId) = 0;
virtual status_t getRoutedDeviceId(audio_port_handle_t *deviceId) = 0;
virtual status_t enableAudioDeviceCallback(bool enabled) = 0;
+protected:
+
+ friend class IMediaPlayerTest;
+ enum {
+ DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
+ SET_DATA_SOURCE_URL,
+ SET_DATA_SOURCE_FD,
+ SET_DATA_SOURCE_STREAM,
+ SET_DATA_SOURCE_CALLBACK,
+ SET_DATA_SOURCE_RTP,
+ SET_BUFFERING_SETTINGS,
+ GET_BUFFERING_SETTINGS,
+ PREPARE_ASYNC,
+ START,
+ STOP,
+ IS_PLAYING,
+ SET_PLAYBACK_SETTINGS,
+ GET_PLAYBACK_SETTINGS,
+ SET_SYNC_SETTINGS,
+ GET_SYNC_SETTINGS,
+ PAUSE,
+ SEEK_TO,
+ GET_CURRENT_POSITION,
+ GET_DURATION,
+ RESET,
+ NOTIFY_AT,
+ SET_AUDIO_STREAM_TYPE,
+ SET_LOOPING,
+ SET_VOLUME,
+ INVOKE,
+ SET_METADATA_FILTER,
+ GET_METADATA,
+ SET_AUX_EFFECT_SEND_LEVEL,
+ ATTACH_AUX_EFFECT,
+ SET_VIDEO_SURFACETEXTURE,
+ SET_PARAMETER,
+ GET_PARAMETER,
+ SET_RETRANSMIT_ENDPOINT,
+ GET_RETRANSMIT_ENDPOINT,
+ SET_NEXT_PLAYER,
+ APPLY_VOLUME_SHAPER,
+ GET_VOLUME_SHAPER_STATE,
+ // Modular DRM
+ PREPARE_DRM,
+ RELEASE_DRM,
+ // AudioRouting
+ SET_OUTPUT_DEVICE,
+ GET_ROUTED_DEVICE_ID,
+ ENABLE_AUDIO_DEVICE_CALLBACK,
+ };
};
// ----------------------------------------------------------------------------
diff --git a/media/libmedia/tests/mediaplayer/Android.bp b/media/libmedia/tests/mediaplayer/Android.bp
new file mode 100644
index 0000000..0fff7b4
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/Android.bp
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_test {
+ name: "IMediaPlayerTest",
+ test_suites: ["device-tests", "mts"],
+ gtest: true,
+
+ srcs: [
+ "IMediaPlayerTest.cpp",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libmedia",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+ compile_multilib: "first",
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
new file mode 100644
index 0000000..097e8ef
--- /dev/null
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <binder/IServiceManager.h>
+#include <binder/Parcel.h>
+#include <gtest/gtest.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/IMediaPlayer.h>
+#include <media/IMediaPlayerService.h>
+#include <media/mediaplayer.h>
+
+namespace android {
+
+constexpr uint8_t kMockByteArray[16] = {};
+
+ class IMediaPlayerTest : public testing::Test {
+ protected:
+ static constexpr uint32_t PREPARE_DRM = IMediaPlayer::PREPARE_DRM;
+
+ void SetUp() override {
+ mediaPlayer_ = new MediaPlayer();
+ sp<IServiceManager> serviceManager = defaultServiceManager();
+ sp<IBinder> mediaPlayerService = serviceManager->getService(String16("media.player"));
+ sp<IMediaPlayerService> iMediaPlayerService =
+ IMediaPlayerService::asInterface(mediaPlayerService);
+ iMediaPlayer_ = iMediaPlayerService->create(mediaPlayer_);
+ }
+
+ sp<MediaPlayer> mediaPlayer_;
+ sp<IMediaPlayer> iMediaPlayer_;
+ };
+
+TEST_F(IMediaPlayerTest, PrepareDrmInvalidTransaction) {
+ Parcel data, reply;
+ data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+ data.write(kMockByteArray, 16);
+
+ // We write a length greater than the following session id array. Should be discarded.
+ data.writeUint32(2);
+ data.writeUnpadded(kMockByteArray, 1);
+
+ status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+ ->transact(PREPARE_DRM, data, &reply);
+ ASSERT_EQ(result, BAD_VALUE);
+}
+
+TEST_F(IMediaPlayerTest, PrepareDrmValidTransaction) {
+ Parcel data, reply;
+ data.writeInterfaceToken(iMediaPlayer_->getInterfaceDescriptor());
+ data.write(kMockByteArray, 16);
+
+ // We write a length equal to the length of the following data. The transaction should be valid.
+ data.writeUint32(1);
+ data.write(kMockByteArray, 1);
+
+ status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
+ ->transact(PREPARE_DRM, data, &reply);
+ ASSERT_EQ(result, OK);
+}
+} // namespace android
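
The two tests above exercise the length-prefixed byte-array framing of the PREPARE_DRM transaction: a declared length larger than the bytes actually written must be rejected with BAD_VALUE. A hedged sketch of the kind of bounds check such a handler needs; the real readVector helper in IMediaPlayer.cpp may differ in detail:

#include <binder/Parcel.h>
#include <utils/Errors.h>
#include <utils/Vector.h>

using namespace android;

// Illustrative only: a length-prefixed read that refuses to trust a size
// field larger than the bytes remaining in the parcel.
static status_t readSizedVector(const Parcel& parcel, Vector<uint8_t>* out) {
    uint32_t size = 0;
    status_t err = parcel.readUint32(&size);
    if (err != OK) return err;
    if (size > parcel.dataAvail()) return BAD_VALUE;  // declared length exceeds payload
    out->resize(size);
    return size ? parcel.read(out->editArray(), size) : OK;
}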
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index b43df38..d2d978a 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -124,8 +124,16 @@
// index(i) should be started from 1. 0 is reserved for [root]
mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
mRTPConn->setSelfID(info->mSelfID);
- mRTPConn->setJbTime(
- (info->mJbTimeMs <= 3000 && info->mJbTimeMs >= 40) ? info->mJbTimeMs : 300);
+ mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
+
+ unsigned long PT;
+ AString formatDesc, formatParams;
+ // index(i) should be started from 1. 0 is reserved for [root]
+ desc->getFormatType(i + 1, &PT, &formatDesc, &formatParams);
+
+ int32_t clockRate, numChannels;
+ ASessionDescription::ParseFormatDesc(formatDesc.c_str(), &clockRate, &numChannels);
+ info->mTimeScale = clockRate;
info->mRTPSocket = sockRtp;
info->mRTCPSocket = sockRtcp;
@@ -146,10 +154,8 @@
if (info->mIsAudio) {
mAudioTrack = source;
- info->mTimeScale = 16000;
} else {
mVideoTrack = source;
- info->mTimeScale = 90000;
}
info->mSource = source;
@@ -680,7 +686,7 @@
newTrackInfo.mIsAudio = isAudioKey;
mTracks.push(newTrackInfo);
info = &mTracks.editTop();
- info->mJbTimeMs = 300;
+ info->mJbTimeMs = kStaticJitterTimeMs;
}
if (key == "rtp-param-mime-type") {
@@ -724,7 +730,8 @@
int64_t networkHandle = atoll(value);
setSocketNetwork(networkHandle);
} else if (key == "rtp-param-jitter-buffer-time") {
- info->mJbTimeMs = atoi(value);
+ // clamping min at 40, max at 3000
+ info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
}
return OK;
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 4ba85ba..6ae3718 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -237,13 +237,10 @@
}
bool isHDR(const sp<AMessage> &format) {
- uint32_t standard, range, transfer;
+ uint32_t standard, transfer;
if (!format->findInt32("color-standard", (int32_t*)&standard)) {
standard = 0;
}
- if (!format->findInt32("color-range", (int32_t*)&range)) {
- range = 0;
- }
if (!format->findInt32("color-transfer", (int32_t*)&transfer)) {
transfer = 0;
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 6c4addf..0107c32 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -169,9 +169,7 @@
}
status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
- sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, this);
- msg->setInt64("stop-time-us", stopTimeUs);
- return postSynchronouslyAndReturnError(msg);
+ return mSource->setStopTimeUs(stopTimeUs);
}
status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> ¬ify) {
@@ -189,19 +187,11 @@
}
void MediaCodecSource::Puller::stop() {
- bool interrupt = false;
- {
- // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
- // stop.
- Mutexed<Queue>::Locked queue(mQueue);
- queue->mPulling = false;
- interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
- queue->flush(); // flush any unprocessed pulled buffers
- }
-
- if (interrupt) {
- interruptSource();
- }
+ // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
+ // stop.
+ Mutexed<Queue>::Locked queue(mQueue);
+ queue->mPulling = false;
+ queue->flush(); // flush any unprocessed pulled buffers
}
void MediaCodecSource::Puller::interruptSource() {
@@ -685,9 +675,9 @@
if (mStopping && reachedEOS) {
ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
if (mPuller != NULL) {
- mPuller->stopSource();
+ mPuller->interruptSource();
}
- ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
+ ALOGI("source (%s) stopped", mIsVideo ? "video" : "audio");
// posting reply to everyone that's waiting
List<sp<AReplyToken>>::iterator it;
for (it = mStopReplyIDQueue.begin();
@@ -896,7 +886,7 @@
{
int32_t eos = 0;
if (msg->findInt32("eos", &eos) && eos) {
- ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
+ ALOGI("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
signalEOS();
break;
}
@@ -1114,12 +1104,7 @@
if (generation != mGeneration) {
break;
}
-
- if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
- ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
- mPuller->interruptSource();
- ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
- }
+ ALOGD("source (%s) stopping stalled", mIsVideo ? "video" : "audio");
signalEOS();
break;
}
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dd2eed3..c51c048 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -183,7 +183,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="!slow-cpu">
- <Limit name="size" min="2x2" max="1920x1080" />
+ <Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
<Limit name="block-count" range="1-16384" />
diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/libstagefright/foundation/ABuffer.cpp
index c8965d9..c79384c 100644
--- a/media/libstagefright/foundation/ABuffer.cpp
+++ b/media/libstagefright/foundation/ABuffer.cpp
@@ -67,7 +67,7 @@
void ABuffer::setRange(size_t offset, size_t size) {
CHECK_LE(offset, mCapacity);
- CHECK_LE(offset + size, mCapacity);
+ CHECK_LE(size, mCapacity - offset);
mRangeOffset = offset;
mRangeLength = size;
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 2f93d5d..e1cc5ec 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -34,6 +34,8 @@
namespace android {
+const double JITTER_MULTIPLE = 1.5f;
+
// static
AAVCAssembler::AAVCAssembler(const sp<AMessage> ¬ify)
: mNotifyMsg(notify),
@@ -43,6 +45,7 @@
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
mLastIFrameProvidedAtMs(0),
+ mLastRtpTimeJitterDataUs(0),
mWidth(0),
mHeight(0) {
}
@@ -121,24 +124,70 @@
sp<ABuffer> buffer = *queue->begin();
buffer->meta()->setObject("source", source);
+ /**
+ * RFC 3550 computes the interarrival jitter over 'ALL packets',
+ * which is not directly useful as an ingredient of the buffering time.
+ * Instead, we calculate the time only across 'NAL units'.
+ */
int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+ int64_t nowTimeUs = ALooper::GetNowUs();
+ if (rtpTime != mLastRtpTimeJitterDataUs) {
+ source->putBaseJitterData(rtpTime, nowTimeUs);
+ mLastRtpTimeJitterDataUs = rtpTime;
+ }
+ source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- int64_t startTime = source->mFirstSysTime / 1000;
- int64_t nowTime = ALooper::GetNowUs() / 1000;
- int64_t playedTime = nowTime - startTime;
+ const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t nowTimeMs = nowTimeUs / 1000;
+ const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+ const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+ const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
+ const int64_t clockRate = source->mClockRate;
- int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
- const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+ int64_t playedTimeMs = nowTimeMs - startTimeMs;
+ int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
- int64_t expiredTimeInJb = rtpTime + jitterTime;
- bool isExpired = expiredTimeInJb <= (playedTimeRtp);
- bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
- bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+ /**
+ * Based on experience with real commercial network services,
+ * 300 ms is a practical upper bound for the jitter buffer time of a video RTP service.
+ */
+
+ /**
+ * The base jitter is the expected additional propagation time.
+ * Packets that miss this budget may be dropped: a shorter budget gives a
+ * faster response but forces us to drop more delayed packets.
+ * Expected range: 50 ms ~ 1000 ms (300 ms is a practical upper bound).
+ */
+ const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
+ /**
+ * Dynamic jitter is the variance of the interarrival time, as defined in Section 6.4.1 of RFC 3550.
+ * We treat it as a tolerance applied to the moment each piece of data is queued.
+ * Expected range: 0 ms ~ 150 ms (practically it should not exceed 300 ms).
+ */
+ const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+ const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+ /* Fundamental jitter time */
+ const int32_t jitterTimeMs = baseJbTimeMs;
+ const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
+
+ // Until (T), this assembler waits unconditionally to collect the current NAL unit
+ int64_t expiredTimeRtp = rtpTime + jitterTimeRtp; // When does this buffer expire? (T)
+ int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
+ bool isExpired = (diffTimeRtp >= 0); // Expired once T has passed
+
+ // From (T), this assembler tries to complete the NAL till (T + try)
+ int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+ int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+ bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+ // After (T + try), it gives a last chance until (T + try + a), logging warnings.
+ int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE; // Use Dyn as 'a'
+ bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha)); // The Maginot line
if (mShowQueue && mShowQueueCnt < 20) {
showCurrentQueue(queue);
- printNowTimeUs(startTime, nowTime, playedTime);
- printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
mShowQueueCnt++;
}
@@ -149,17 +198,23 @@
return NOT_ENOUGH_DATA;
}
- if (isTooLate200) {
- ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
- }
+ if (isFirstLineBroken) {
+ if (isSecondLineBroken) {
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ "Seq# %d \t ExpSeq# %d \t"
+ "JitterMs %d + (%d + %d * %.3f)",
+ (long long)diffTimeRtp, (long long)totalDiffTimeMs,
+ buffer->int32Data(), mNextExpectedSeqNo,
+ jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- if (isTooLate300) {
- ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
- (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
- printNowTimeUs(startTime, nowTime, playedTime);
- printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
-
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
+ mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
+ } else {
+ ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+ jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ }
}
if (mNextExpectedSeqNoValid) {
@@ -170,6 +225,7 @@
source->noticeAbandonBuffer(cntRemove);
ALOGW("delete %d of %d buffers", cntRemove, size);
}
+
if (queue->empty()) {
return NOT_ENOUGH_DATA;
}
@@ -565,17 +621,6 @@
msg->post();
}
-inline int64_t AAVCAssembler::findRTPTime(
- const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
- /* If you want to +, -, * rtpTime, recommend to declare rtpTime as int64_t.
- Because rtpTime can be near UINT32_MAX. Beware the overflow. */
- int64_t rtpTime = 0;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- // If the first overs 2^31 and rtp unders 2^31, the rtp value is overflowed one.
- int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
- return rtpTime | overflowMask;
-}
-
int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
sp<ABuffer> buffer = *(queue->begin());
@@ -620,16 +665,6 @@
return initSize - queue->size();
}
-inline void AAVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
- ALOGD("start=%lld, now=%lld, played=%lld",
- (long long)start, (long long)now, (long long)play);
-}
-
-inline void AAVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
- ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
- (long long)rtp, (long long)play, (long long)exp, isExp);
-}
-
ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addNALUnit(source);
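
The reworked expiry logic above compares everything in RTP ticks rather than milliseconds. A minimal numeric sketch of those comparisons, assuming a 90 kHz video clock, illustrative jitter inputs, and a local ms-to-RTP helper standing in for the assembler's own MsToRtp/RtpToMs:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Local stand-in for the assembler's MsToRtp helper.
static int64_t msToRtp(int64_t ms, int64_t clockRate) { return ms * clockRate / 1000; }

int main() {
    const int64_t clockRate = 90000;  // typical video RTP clock
    // Assumed inputs for illustration only.
    int staticJitterMs = 100, baseJitterMs = 180, dynamicJitterMs = 40;

    int jitterMs = std::min(std::max(staticJitterMs, baseJitterMs), 300);      // base budget -> 180
    int tryMs    = baseJitterMs / 2 + std::min(dynamicJitterMs, 150);          // grace window -> 130
    int64_t alpha = static_cast<int64_t>(
            msToRtp(std::min(dynamicJitterMs, 150), clockRate) * 1.5);         // last-chance margin

    // Suppose playback has progressed 250 ms past this NAL unit's RTP timestamp.
    int64_t playedPastRtp = msToRtp(250, clockRate);
    int64_t diffRtp = playedPastRtp - msToRtp(jitterMs, clockRate);  // playedTimeRtp - expiredTimeRtp

    bool expired      = diffRtp >= 0;                                // past (T)
    bool firstBroken  = diffRtp > msToRtp(tryMs, clockRate);         // past (T + try)
    bool secondBroken = diffRtp > msToRtp(tryMs, clockRate) + alpha; // past (T + try + a)
    printf("expired=%d firstLineBroken=%d secondLineBroken=%d\n",
           expired, firstBroken, secondBroken);
    return 0;
}

With these numbers the unit is expired (70 ms past T) but still inside the grace window, so the assembler keeps trying to complete it rather than skipping ahead.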
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 9d71e2f..8d19773 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -48,6 +48,7 @@
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
uint64_t mLastIFrameProvidedAtMs;
+ int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
int32_t mHeight;
List<sp<ABuffer> > mNALUnits;
@@ -63,13 +64,10 @@
void submitAccessUnit();
- inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
size_t avail, float goodRatio);
int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
- void printNowTimeUs(int64_t start, int64_t now, int64_t play);
- void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
};
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 553ea08..d32e85d 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -41,6 +41,8 @@
namespace android {
+const double JITTER_MULTIPLE = 1.5f;
+
// static
AHEVCAssembler::AHEVCAssembler(const sp<AMessage> ¬ify)
: mNotifyMsg(notify),
@@ -50,6 +52,7 @@
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
mLastIFrameProvidedAtMs(0),
+ mLastRtpTimeJitterDataUs(0),
mWidth(0),
mHeight(0) {
@@ -130,23 +133,71 @@
sp<ABuffer> buffer = *queue->begin();
buffer->meta()->setObject("source", source);
+
+ /**
+ * RFC 3550 computes the interarrival jitter over 'ALL packets',
+ * which is not directly useful as an ingredient of the buffering time.
+ * Instead, we calculate the time only across 'NAL units'.
+ */
int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+ int64_t nowTimeUs = ALooper::GetNowUs();
+ if (rtpTime != mLastRtpTimeJitterDataUs) {
+ source->putBaseJitterData(rtpTime, nowTimeUs);
+ mLastRtpTimeJitterDataUs = rtpTime;
+ }
+ source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- int64_t startTime = source->mFirstSysTime / 1000;
- int64_t nowTime = ALooper::GetNowUs() / 1000;
- int64_t playedTime = nowTime - startTime;
- int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
- const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+ const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t nowTimeMs = nowTimeUs / 1000;
+ const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
+ const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
+ const int32_t dynamicJitterTimeMs = source->getInterArrivalJitterTimeMs();
+ const int64_t clockRate = source->mClockRate;
- int64_t expiredTimeInJb = rtpTime + jitterTime;
- bool isExpired = expiredTimeInJb <= (playedTimeRtp);
- bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
- bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
+ int64_t playedTimeMs = nowTimeMs - startTimeMs;
+ int64_t playedTimeRtp = source->mFirstRtpTime + MsToRtp(playedTimeMs, clockRate);
+
+ /**
+ * Based on experience with real commercial network services,
+ * 300 ms is a heuristic upper bound on the jitter buffer time for a video RTP service.
+ */
+
+ /**
+ * The base jitter is the expected additional propagation time.
+ * Packets that miss this window may be dropped.
+ * A shorter window gives a faster response but drops more delayed packets.
+ * Expected range: 50 ms ~ 1000 ms (300 ms is a practical upper bound).
+ */
+ const int32_t baseJbTimeMs = std::min(std::max(staticJitterTimeMs, baseJitterTimeMs), 300);
+ /**
+ * Dynamic jitter is the interarrival-time variance defined in Section 6.4.1 of RFC 3550.
+ * It can be regarded as a tolerance on each packet's arrival time.
+ * Expected range: 0 ms ~ 150 ms (practically not over 300 ms).
+ */
+ const int32_t dynamicJbTimeMs = std::min(dynamicJitterTimeMs, 150);
+ const int64_t dynamicJbTimeRtp = MsToRtp(dynamicJbTimeMs, clockRate);
+ /* Fundamental jitter time */
+ const int32_t jitterTimeMs = baseJbTimeMs;
+ const int64_t jitterTimeRtp = MsToRtp(jitterTimeMs, clockRate);
+
+ // Until (T), this assembler waits unconditionally to collect the current NAL unit.
+ int64_t expiredTimeRtp = rtpTime + jitterTimeRtp; // when this buffer expires (T)
+ int64_t diffTimeRtp = playedTimeRtp - expiredTimeRtp;
+ bool isExpired = (diffTimeRtp >= 0); // expired once T has passed
+
+ // From (T), this assembler tries to complete the NAL unit until (T + try).
+ int32_t tryJbTimeMs = baseJitterTimeMs / 2 + dynamicJbTimeMs;
+ int64_t tryJbTimeRtp = MsToRtp(tryJbTimeMs, clockRate);
+ bool isFirstLineBroken = (diffTimeRtp > tryJbTimeRtp);
+
+ // After (T + try), it gives a last chance until (T + try + a), with warning messages.
+ int64_t alpha = dynamicJbTimeRtp * JITTER_MULTIPLE; // use the dynamic jitter as 'a'
+ bool isSecondLineBroken = (diffTimeRtp > (tryJbTimeRtp + alpha)); // the final deadline
if (mShowQueueCnt < 20) {
showCurrentQueue(queue);
- printNowTimeUs(startTime, nowTime, playedTime);
- printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+ printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
mShowQueueCnt++;
}
@@ -157,17 +208,23 @@
return NOT_ENOUGH_DATA;
}
- if (isTooLate200) {
- ALOGW("=== WARNING === buffer arrived 200ms late. === WARNING === ");
- }
+ if (isFirstLineBroken) {
+ if (isSecondLineBroken) {
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ "Seq# %d \t ExpSeq# %d \t"
+ "JitterMs %d + (%d + %d * %.3f)",
+ (long long)diffTimeRtp, (long long)totalDiffTimeMs,
+ buffer->int32Data(), mNextExpectedSeqNo,
+ jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
+ printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- if (isTooLate300) {
- ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
- (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
- printNowTimeUs(startTime, nowTime, playedTime);
- printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
-
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
+ mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
+ } else {
+ ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
+ jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ }
}
if (mNextExpectedSeqNoValid) {
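For reference, the expiry decision in the hunk above reduces to three nested deadlines: (T) after the fundamental jitter time, (T + try) after an additional recovery window, and (T + try + a) as a final limit. The following is a minimal standalone sketch of that computation; all names are illustrative, and the 1.5 multiple is approximated with integer math instead of the patch's double constant.

    #include <algorithm>
    #include <cstdint>

    struct JitterDecision {
        bool expired;           // (T) has passed: stop waiting unconditionally
        bool firstLineBroken;   // (T + try) has passed: try to pick a proper sequence
        bool secondLineBroken;  // (T + try + a) has passed: log loudly and drop
    };

    JitterDecision decide(int64_t rtpTime, int64_t playedTimeRtp, int64_t clockRate,
                          int32_t staticJbMs, int32_t baseJbMs, int32_t dynJbMs) {
        auto msToRtp = [clockRate](int64_t ms) { return ms * clockRate / 1000; };
        // Base jitter clamped to a 300 ms practical upper bound.
        const int32_t jitterMs = std::min(std::max(staticJbMs, baseJbMs), (int32_t)300);
        // Dynamic (interarrival) jitter capped at 150 ms.
        const int32_t cappedDynMs = std::min(dynJbMs, (int32_t)150);

        const int64_t expiredRtp = rtpTime + msToRtp(jitterMs);       // (T)
        const int64_t diffRtp = playedTimeRtp - expiredRtp;
        const int64_t tryRtp = msToRtp(baseJbMs / 2 + cappedDynMs);   // (try)
        const int64_t alphaRtp = msToRtp(cappedDynMs) * 3 / 2;        // (a), roughly 1.5x

        return JitterDecision{ diffRtp >= 0, diffRtp > tryRtp, diffRtp > tryRtp + alphaRtp };
    }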
@@ -578,17 +635,6 @@
msg->post();
}
-inline int64_t AHEVCAssembler::findRTPTime(
- const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
- /* If you want to +, -, * rtpTime, recommend to declare rtpTime as int64_t.
- Because rtpTime can be near UINT32_MAX. Beware the overflow. */
- int64_t rtpTime = 0;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- // If the first overs 2^31 and rtp unders 2^31, the rtp value is overflowed one.
- int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
- return rtpTime | overflowMask;
-}
-
int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
sp<ABuffer> buffer = *(queue->begin());
@@ -633,16 +679,6 @@
return initSize - queue->size();
}
-inline void AHEVCAssembler::printNowTimeUs(int64_t start, int64_t now, int64_t play) {
- ALOGD("start=%lld, now=%lld, played=%lld",
- (long long)start, (long long)now, (long long)play);
-}
-
-inline void AHEVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
- ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
- (long long)rtp, (long long)play, (long long)exp, isExp);
-}
-
ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addNALUnit(source);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index bf1cded..68777a7 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -49,6 +49,7 @@
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
uint64_t mLastIFrameProvidedAtMs;
+ int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
int32_t mHeight;
List<sp<ABuffer> > mNALUnits;
@@ -64,13 +65,10 @@
void submitAccessUnit();
- inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
size_t avail, float goodRatio);
int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
- void printNowTimeUs(int64_t start, int64_t now, int64_t play);
- void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/ARTPAssembler.h
index 191f08e..f959c40 100644
--- a/media/libstagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/ARTPAssembler.h
@@ -19,6 +19,9 @@
#define A_RTP_ASSEMBLER_H_
#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <utils/List.h>
#include <utils/RefBase.h>
@@ -61,12 +64,47 @@
bool mShowQueue;
int32_t mShowQueueCnt;
+ // Utility functions
+ inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+ inline int64_t MsToRtp(int64_t ms, int64_t clockRate);
+ inline int64_t RtpToMs(int64_t rtp, int64_t clockRate);
+ inline void printNowTimeMs(int64_t start, int64_t now, int64_t play);
+ inline void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
+
private:
int64_t mFirstFailureTimeUs;
DISALLOW_EVIL_CONSTRUCTORS(ARTPAssembler);
};
+inline int64_t ARTPAssembler::findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+ /* When adding, subtracting, or multiplying rtpTime, declare it as int64_t,
+ because rtpTime can be near UINT32_MAX. Beware of overflow. */
+ int64_t rtpTime = 0;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+ int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+ return rtpTime | overflowMask;
+}
+
+inline int64_t ARTPAssembler::MsToRtp(int64_t ms, int64_t clockRate) {
+ return ms * clockRate / 1000;
+}
+
+inline int64_t ARTPAssembler::RtpToMs(int64_t rtp, int64_t clockRate) {
+ return rtp * 1000 / clockRate;
+}
+
+inline void ARTPAssembler::printNowTimeMs(int64_t start, int64_t now, int64_t play) {
+ ALOGD("start=%lld, now=%lld, played=%lld",
+ (long long)start, (long long)now, (long long)play);
+}
+
+inline void ARTPAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+ ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+ (long long)rtp, (long long)play, (long long)exp, isExp);
+}
+
} // namespace android
#endif // A_RTP_ASSEMBLER_H_
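The helpers moved into ARTPAssembler.h are small enough to restate in isolation; the sketch below uses hypothetical free functions to show the wrap handling and the millisecond/RTP-tick conversions on their own.

    #include <cstdint>

    // If the session's first timestamp already has its top bit set and the current
    // 32-bit timestamp does not, the current value has wrapped around; extend it
    // into bit 32 so that later subtraction stays monotonic.
    int64_t extendRtpTime(uint32_t firstRTPTime, uint32_t rtpTime32) {
        int64_t rtpTime = rtpTime32;
        int64_t overflowMask = ((int64_t)firstRTPTime & 0x80000000 & ~rtpTime) << 1;
        return rtpTime | overflowMask;
    }

    // RTP timestamps advance at the media clock rate, e.g. 90000 Hz for video,
    // so 300 ms at 90 kHz corresponds to 27000 timestamp units.
    int64_t msToRtp(int64_t ms, int64_t clockRate) { return ms * clockRate / 1000; }
    int64_t rtpToMs(int64_t rtp, int64_t clockRate) { return rtp * 1000 / clockRate; }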
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 61c06d1..33c85a7 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -70,6 +70,8 @@
bool mIsInjected;
+ // Time of the most recent poll
+ int64_t mLastPollTimeUs;
// RTCP Extension for CVO
int mCVOExtMap; // will be set to 0 if cvo is not negotiated in sdp
};
@@ -80,7 +82,7 @@
mLastReceiverReportTimeUs(-1),
mLastBitrateReportTimeUs(-1),
mTargetBitrate(-1),
- mJbTimeMs(300) {
+ mStaticJitterTimeMs(kStaticJitterTimeMs) {
}
ARTPConnection::~ARTPConnection() {
@@ -416,6 +418,7 @@
return;
}
+ int64_t nowUs = ALooper::GetNowUs();
int res = select(maxSocket + 1, &rs, NULL, NULL, &tv);
if (res > 0) {
@@ -425,6 +428,7 @@
++it;
continue;
}
+ it->mLastPollTimeUs = nowUs;
status_t err = OK;
if (FD_ISSET(it->mRTPSocket, &rs)) {
@@ -486,7 +490,6 @@
}
}
- int64_t nowUs = ALooper::GetNowUs();
checkRxBitrate(nowUs);
if (mLastReceiverReportTimeUs <= 0
@@ -1066,7 +1069,7 @@
}
source->setSelfID(mSelfID);
- source->setJbTime(mJbTimeMs > 0 ? mJbTimeMs : 300);
+ source->setStaticJitterTimeMs(mStaticJitterTimeMs);
info->mSources.add(srcId, source);
} else {
source = info->mSources.valueAt(index);
@@ -1086,8 +1089,8 @@
mSelfID = selfID;
}
-void ARTPConnection::setJbTime(const uint32_t jbTimeMs) {
- mJbTimeMs = jbTimeMs;
+void ARTPConnection::setStaticJitterTimeMs(const uint32_t jbTimeMs) {
+ mStaticJitterTimeMs = jbTimeMs;
}
void ARTPConnection::setTargetBitrate(int32_t targetBitrate) {
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index a37ac0e..ea0a374 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -46,7 +46,7 @@
void injectPacket(int index, const sp<ABuffer> &buffer);
void setSelfID(const uint32_t selfID);
- void setJbTime(const uint32_t jbTimeMs);
+ void setStaticJitterTimeMs(const uint32_t jbTimeMs);
void setTargetBitrate(int32_t targetBitrate);
// Creates a pair of UDP datagram sockets bound to adjacent ports
@@ -89,7 +89,7 @@
int32_t mSelfID;
int32_t mTargetBitrate;
- uint32_t mJbTimeMs;
+ uint32_t mStaticJitterTimeMs;
int32_t mCumulativeBytes;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 3fdf8e4..8787d65 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -48,7 +48,6 @@
mFirstRtpTime(0),
mFirstSysTime(0),
mClockRate(0),
- mJbTimeMs(300), // default jitter buffer time is 300ms.
mFirstSsrc(0),
mHighestNackNumber(0),
mID(id),
@@ -59,6 +58,7 @@
mPrevNumBuffersReceived(0),
mPrevExpectedForRR(0),
mPrevNumBuffersReceivedForRR(0),
+ mStaticJbTimeMs(kStaticJitterTimeMs),
mLastNTPTime(0),
mLastNTPTimeUpdateUs(0),
mIssueFIRRequests(false),
@@ -102,6 +102,11 @@
if (mAssembler != NULL && !mAssembler->initCheck()) {
mAssembler.clear();
}
+
+ int32_t clockRate, numChannels;
+ ASessionDescription::ParseFormatDesc(desc.c_str(), &clockRate, &numChannels);
+ mClockRate = clockRate;
+ mJitterCalc = new JitterCalc(mClockRate);
}
static uint32_t AbsDiff(uint32_t seq1, uint32_t seq2) {
@@ -139,9 +144,9 @@
mBaseSeqNumber = seqNum;
mFirstRtpTime = firstRtpTime;
mFirstSsrc = ssrc;
- ALOGD("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u, ssrc=%d",
+ ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
- mClockRate = 90000;
+ mJitterCalc->init(mFirstRtpTime, mFirstSysTime, 0, mStaticJbTimeMs * 1000);
mQueue.push_back(buffer);
return true;
}
@@ -327,10 +332,11 @@
data[18] = (mHighestSeqNumber >> 8) & 0xff;
data[19] = mHighestSeqNumber & 0xff;
- data[20] = 0x00; // Interarrival jitter
- data[21] = 0x00;
- data[22] = 0x00;
- data[23] = 0x00;
+ uint32_t jitterTime = 0;
+ data[20] = jitterTime >> 24; // Interarrival jitter
+ data[21] = (jitterTime >> 16) & 0xff;
+ data[22] = (jitterTime >> 8) & 0xff;
+ data[23] = jitterTime & 0xff;
uint32_t LSR = 0;
uint32_t DLSR = 0;
@@ -508,15 +514,35 @@
kSourceID = selfID;
}
-void ARTPSource::setJbTime(const uint32_t jbTimeMs) {
- mJbTimeMs = jbTimeMs;
-}
-
void ARTPSource::setPeriodicFIR(bool enable) {
ALOGD("setPeriodicFIR %d", enable);
mIssueFIRRequests = enable;
}
+int32_t ARTPSource::getStaticJitterTimeMs() {
+ return mStaticJbTimeMs;
+}
+
+int32_t ARTPSource::getBaseJitterTimeMs() {
+ return mJitterCalc->getBaseJitterMs();
+}
+
+int32_t ARTPSource::getInterArrivalJitterTimeMs() {
+ return mJitterCalc->getInterArrivalJitterMs();
+}
+
+void ARTPSource::setStaticJitterTimeMs(const uint32_t jbTimeMs) {
+ mStaticJbTimeMs = jbTimeMs;
+}
+
+void ARTPSource::putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+ mJitterCalc->putBaseData(timeStamp, arrivalTime);
+}
+
+void ARTPSource::putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime) {
+ mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
+}
+
bool ARTPSource::isNeedToEarlyNotify() {
uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
int32_t intervalExpectedInNow = expected - mPrevExpected;
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index c51fd8a..0edff23 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -27,8 +27,12 @@
#include <map>
+#include "JitterCalculator.h"
+
namespace android {
+const uint32_t kStaticJitterTimeMs = 50; // 50ms
+
struct ABuffer;
struct AMessage;
struct ARTPAssembler;
@@ -64,8 +68,15 @@
void setSeqNumToNACK(uint16_t seqNum, uint16_t mask, uint16_t nowJitterHeadSeqNum);
uint32_t getSelfID();
void setSelfID(const uint32_t selfID);
- void setJbTime(const uint32_t jbTimeMs);
void setPeriodicFIR(bool enable);
+
+ int32_t getStaticJitterTimeMs();
+ int32_t getBaseJitterTimeMs();
+ int32_t getInterArrivalJitterTimeMs();
+ void setStaticJitterTimeMs(const uint32_t jbTimeMs);
+ void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
+ void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
+
bool isNeedToEarlyNotify();
void notifyPktInfo(int32_t bitrate, bool isRegular);
// FIR needs to be sent by missing packet or broken video image.
@@ -78,7 +89,6 @@
int64_t mFirstSysTime;
int32_t mClockRate;
- uint32_t mJbTimeMs;
int32_t mFirstSsrc;
int32_t mHighestNackNumber;
@@ -96,6 +106,9 @@
List<sp<ABuffer> > mQueue;
sp<ARTPAssembler> mAssembler;
+ int32_t mStaticJbTimeMs;
+ sp<JitterCalc> mJitterCalc;
+
typedef struct infoNACK {
uint16_t seqNum;
uint16_t mask;
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index dcadbaf..34d1788 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -36,6 +36,7 @@
"ARTPWriter.cpp",
"ARTSPConnection.cpp",
"ASessionDescription.cpp",
+ "JitterCalculator.cpp",
"SDPLoader.cpp",
],
diff --git a/media/libstagefright/rtsp/JitterCalculator.cpp b/media/libstagefright/rtsp/JitterCalculator.cpp
new file mode 100644
index 0000000..93b5a83
--- /dev/null
+++ b/media/libstagefright/rtsp/JitterCalculator.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "JitterCalc"
+#include <utils/Log.h>
+
+#include "JitterCalculator.h"
+
+#include <stdlib.h>
+
+namespace android {
+
+JitterCalc::JitterCalc(int32_t clockRate)
+ : mClockRate(clockRate) {
+ init(0, 0, 0, 0);
+}
+
+void JitterCalc::init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter) {
+ mFirstTimeStamp = rtpTime;
+ mLastTimeStamp = rtpTime;
+ mFirstArrivalTimeUs = arrivalTimeUs;
+ mLastArrivalTimeUs = arrivalTimeUs;
+
+ mBaseJitterUs = base;
+ mInterArrivalJitterUs = inter;
+}
+
+void JitterCalc::putBaseData(int64_t rtpTime, int64_t arrivalTimeUs) {
+ // An RTP timestamp wraps around after UINT32_MAX; account for that case.
+ const int64_t UINT32_MSB = 0x80000000;
+ int64_t overflowMask = (mFirstTimeStamp & UINT32_MSB & ~rtpTime) << 1;
+ int64_t tempRtpTime = overflowMask | rtpTime;
+
+ // The base jitter can be estimated in various ways; this one averages the extra propagation delay.
+ int64_t scheduledTimeUs = (tempRtpTime - (int64_t)mFirstTimeStamp) * 1000000ll / mClockRate;
+ int64_t elapsedTimeUs = arrivalTimeUs - mFirstArrivalTimeUs;
+ int64_t correctionTimeUs = elapsedTimeUs - scheduledTimeUs; // additional propagation delay
+ mBaseJitterUs = (mBaseJitterUs * 15 + correctionTimeUs) / 16;
+ ALOGV("BaseJitterUs : %lld \t\t correctionTimeUs : %lld",
+ (long long)mBaseJitterUs, (long long)correctionTimeUs);
+}
+
+void JitterCalc::putInterArrivalData(int64_t rtpTime, int64_t arrivalTimeUs) {
+ const int64_t UINT32_MSB = 0x80000000;
+ int64_t tempRtpTime = rtpTime;
+ int64_t tempLastTimeStamp = mLastTimeStamp;
+
+ // An RTP timestamp wraps around after UINT32_MAX; account for that case.
+ int64_t overflowMask = (mLastTimeStamp ^ rtpTime) & UINT32_MSB;
+ tempRtpTime |= ((overflowMask & ~rtpTime) << 1);
+ tempLastTimeStamp |= ((overflowMask & ~mLastTimeStamp) << 1);
+
+ // Section 6.4.1 of RFC 3550 defines this interarrival jitter value.
+ int64_t diffTimeStampUs = abs(tempRtpTime - tempLastTimeStamp) * 1000000ll / mClockRate;
+ int64_t diffArrivalUs = arrivalTimeUs - mLastArrivalTimeUs; // cannot be negative
+ ALOGV("diffTimeStampUs %lld \t\t diffArrivalUs %lld",
+ (long long)diffTimeStampUs, (long long)diffArrivalUs);
+
+ int64_t varianceUs = diffArrivalUs - diffTimeStampUs;
+ mInterArrivalJitterUs = (mInterArrivalJitterUs * 15 + abs(varianceUs)) / 16;
+
+ mLastTimeStamp = (uint32_t)rtpTime;
+ mLastArrivalTimeUs = arrivalTimeUs;
+}
+
+int32_t JitterCalc::getBaseJitterMs() {
+ return mBaseJitterUs / 1000;
+}
+
+int32_t JitterCalc::getInterArrivalJitterMs() {
+ return mInterArrivalJitterUs / 1000;
+}
+
+} // namespace android
+
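Both estimators above are exponentially weighted moving averages with a 1/16 gain: (x * 15 + sample) / 16 is, up to integer rounding, the RFC 3550 update J = J + (|D| - J) / 16, where D is the difference between the arrival spacing and the RTP timestamp spacing. A compact standalone version of the interarrival estimator, assuming timestamps already extended to 64 bits (type and member names are illustrative):

    #include <cstdint>
    #include <cstdlib>

    struct InterArrivalJitter {
        int64_t clockRate;
        int64_t lastRtp = 0;
        int64_t lastArrivalUs = 0;
        int64_t jitterUs = 0;

        void put(int64_t rtpTime, int64_t arrivalUs) {
            if (lastArrivalUs != 0) {
                int64_t rtpSpacingUs = llabs(rtpTime - lastRtp) * 1000000ll / clockRate;
                int64_t arrivalSpacingUs = arrivalUs - lastArrivalUs;
                int64_t d = arrivalSpacingUs - rtpSpacingUs;
                jitterUs += (llabs(d) - jitterUs) / 16;   // J += (|D| - J) / 16
            }
            lastRtp = rtpTime;
            lastArrivalUs = arrivalUs;
        }
    };

    // Usage: InterArrivalJitter j{90000}; j.put(rtpTime, nowUs); on every packet.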
diff --git a/media/libstagefright/rtsp/JitterCalculator.h b/media/libstagefright/rtsp/JitterCalculator.h
new file mode 100644
index 0000000..ff36f1f
--- /dev/null
+++ b/media/libstagefright/rtsp/JitterCalculator.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_JITTER_CALCULATOR_H_
+
+#define A_JITTER_CALCULATOR_H_
+
+#include <stdint.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class JitterCalc : public RefBase {
+private:
+ // RTP timestamp units per second (the media clock rate)
+ const int32_t mClockRate;
+
+ uint32_t mFirstTimeStamp;
+ uint32_t mLastTimeStamp;
+ int64_t mFirstArrivalTimeUs;
+ int64_t mLastArrivalTimeUs;
+
+ int32_t mBaseJitterUs;
+ int32_t mInterArrivalJitterUs;
+
+public:
+ JitterCalc(int32_t clockRate);
+
+ void init(uint32_t rtpTime, int64_t arrivalTimeUs, int32_t base, int32_t inter);
+ void putInterArrivalData(int64_t rtpTime, int64_t arrivalTime);
+ void putBaseData(int64_t rtpTime, int64_t arrivalTimeUs);
+ int32_t getBaseJitterMs();
+ int32_t getInterArrivalJitterMs();
+};
+
+} // namespace android
+
+#endif // A_JITTER_CALCULATOR_H_
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 65e74e6..bcbc7fc 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -85,9 +85,6 @@
dictionary: "dictionaries/formats.dict",
defaults: ["libstagefright_fuzzer_defaults"],
static_libs: [
- "libstagefright_webm",
"libdatasource",
- "libstagefright_esds",
- "libogg",
],
}
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index c8b4a03..4d50d66 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -74,6 +74,7 @@
MtpFfsHandle::MtpFfsHandle(int controlFd) {
mControl.reset(controlFd);
+ mBatchCancel = android::base::GetBoolProperty("sys.usb.mtp.batchcancel", false);
}
MtpFfsHandle::~MtpFfsHandle() {}
@@ -370,7 +371,7 @@
}
int MtpFfsHandle::cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start,
- unsigned end) {
+ unsigned end, bool is_batch_cancel) {
// Some manpages for io_cancel are out of date and incorrect.
// io_cancel will return -EINPROGRESS on success and does
// not place the event in the given memory. We have to use
@@ -386,6 +387,10 @@
} else {
num_events++;
}
+ if (is_batch_cancel && num_events == 1) {
+ num_events = end - start;
+ break;
+ }
}
if (num_events != end - start) {
ret = -1;
@@ -495,7 +500,8 @@
num_events += this_events;
if (event_ret == -1) {
- cancelEvents(mIobuf[i].iocb.data(), ioevs, num_events, mIobuf[i].actual);
+ cancelEvents(mIobuf[i].iocb.data(), ioevs, num_events, mIobuf[i].actual,
+ mBatchCancel);
return -1;
}
ret += event_ret;
@@ -512,7 +518,7 @@
}
}
if (short_packet) {
- if (cancelEvents(mIobuf[i].iocb.data(), ioevs, short_i, mIobuf[i].actual)) {
+ if (cancelEvents(mIobuf[i].iocb.data(), ioevs, short_i, mIobuf[i].actual, false)) {
write_error = true;
}
}
@@ -613,7 +619,7 @@
&num_events) != ret) {
error = true;
cancelEvents(mIobuf[(i-1)%NUM_IO_BUFS].iocb.data(), ioevs, num_events,
- mIobuf[(i-1)%NUM_IO_BUFS].actual);
+ mIobuf[(i-1)%NUM_IO_BUFS].actual, false);
}
has_write = false;
}
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index fe343f7..e552e03 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -17,6 +17,7 @@
#ifndef _MTP_FFS_HANDLE_H
#define _MTP_FFS_HANDLE_H
+#include <android-base/properties.h>
#include <android-base/unique_fd.h>
#include <linux/aio_abi.h>
#include <mutex>
@@ -57,6 +58,7 @@
static int getPacketSize(int ffs_fd);
bool mCanceled;
+ bool mBatchCancel;
android::base::unique_fd mControl;
// "in" from the host's perspective => sink for mtp server
@@ -76,7 +78,8 @@
int iobufSubmit(struct io_buffer *buf, int fd, unsigned length, bool read);
// Cancel submitted requests from start to end in the given array. Return 0 or -1.
- int cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start, unsigned end);
+ int cancelEvents(struct iocb **iocb, struct io_event *events, unsigned start, unsigned end,
+ bool is_batch_cancel);
// Wait for at minimum the given number of events. Returns the amount of data in the returned
// events. Increments counter by the number of events returned.
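The new is_batch_cancel path, gated by the sys.usb.mtp.batchcancel property read in the constructor, stops issuing per-request cancels once a single cancel has succeeded and treats the whole range as cancelled. A sketch of that control flow, with a std::function standing in for the io_cancel() call on one request:

    #include <functional>

    // cancelOne stands in for issuing io_cancel() on a single in-flight request.
    int cancelRange(unsigned start, unsigned end, bool isBatchCancel,
                    const std::function<bool(unsigned)>& cancelOne) {
        unsigned numEvents = 0;
        for (unsigned i = start; i < end; i++) {
            if (!cancelOne(i)) {
                continue;                    // count only successful cancellations
            }
            numEvents++;
            if (isBatchCancel && numEvents == 1) {
                numEvents = end - start;     // one success stands for the whole batch
                break;
            }
        }
        return numEvents == end - start ? 0 : -1;
    }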
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index 2e06da5..4b44dcf 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -69,7 +69,6 @@
java_defaults {
name: "MediaBenchmark-defaults",
- sdk_version: "system_current",
min_sdk_version: "28",
- target_sdk_version: "29",
+ target_sdk_version: "30",
}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index b2aee1a..b222d47 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -17,21 +17,21 @@
buildscript {
repositories {
google()
- jcenter()
+ mavenCentral()
}
dependencies {
- classpath 'com.android.tools.build:gradle:3.5.0'
+ classpath 'com.android.tools.build:gradle:4.2.1'
}
}
apply plugin: 'com.android.application'
android {
- compileSdkVersion 29
+ compileSdkVersion 30
defaultConfig {
applicationId "com.android.media.benchmark"
minSdkVersion 28
- targetSdkVersion 29
+ targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
@@ -57,20 +57,20 @@
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
- version "3.10.2"
+ version "3.18.1"
}
}
}
repositories {
google()
- jcenter()
+ mavenCentral()
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
- implementation 'androidx.appcompat:appcompat:1.1.0'
- testImplementation 'junit:junit:4.12'
- androidTestImplementation 'androidx.test:runner:1.2.0'
- androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+ implementation 'androidx.appcompat:appcompat:1.3.0'
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test:runner:1.3.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.2'
}
\ No newline at end of file
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
index af92424..0192d68 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/cpp/Android.bp
@@ -9,7 +9,6 @@
cc_test_library {
name: "libmediabenchmark_jni",
- sdk_version: "current",
defaults: [
"libmediabenchmark_common-defaults",
diff --git a/media/tests/benchmark/src/native/common/Android.bp b/media/tests/benchmark/src/native/common/Android.bp
index 6b54c6a..718d217 100644
--- a/media/tests/benchmark/src/native/common/Android.bp
+++ b/media/tests/benchmark/src/native/common/Android.bp
@@ -55,7 +55,6 @@
cc_defaults {
name: "libmediabenchmark-defaults",
- sdk_version: "current",
stl: "c++_shared",
shared_libs: [
diff --git a/media/tests/benchmark/src/native/extractor/Extractor.cpp b/media/tests/benchmark/src/native/extractor/Extractor.cpp
index f0bb3b9..3bdfbad 100644
--- a/media/tests/benchmark/src/native/extractor/Extractor.cpp
+++ b/media/tests/benchmark/src/native/extractor/Extractor.cpp
@@ -124,9 +124,7 @@
int64_t sTime = mStats->getCurTime();
if (mExtractor) {
- // TODO: (b/140128505) Multiple calls result in DoS.
- // Uncomment call to AMediaExtractor_delete() once this is resolved
- // AMediaExtractor_delete(mExtractor);
+ AMediaExtractor_delete(mExtractor);
mExtractor = nullptr;
}
int64_t eTime = mStats->getCurTime();
diff --git a/media/tests/benchmark/tests/Android.bp b/media/tests/benchmark/tests/Android.bp
index 0fbd20d..9a8caa3 100644
--- a/media/tests/benchmark/tests/Android.bp
+++ b/media/tests/benchmark/tests/Android.bp
@@ -33,7 +33,12 @@
srcs: ["ExtractorTest.cpp"],
- static_libs: ["libmediabenchmark_extractor"]
+ static_libs: ["libmediabenchmark_extractor"],
+
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ ],
}
cc_test {
@@ -50,6 +55,11 @@
"libmediabenchmark_extractor",
"libmediabenchmark_decoder",
],
+
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ ],
}
cc_test {
diff --git a/media/tests/benchmark/tests/DecoderTest.cpp b/media/tests/benchmark/tests/DecoderTest.cpp
index 81ef02a..3666724 100644
--- a/media/tests/benchmark/tests/DecoderTest.cpp
+++ b/media/tests/benchmark/tests/DecoderTest.cpp
@@ -21,6 +21,8 @@
#include <iostream>
#include <limits>
+#include <android/binder_process.h>
+
#include "BenchmarkTestEnvironment.h"
#include "Decoder.h"
@@ -175,6 +177,7 @@
"c2.android.hevc.decoder", true)));
int main(int argc, char **argv) {
+ ABinderProcess_startThreadPool();
gEnv = new BenchmarkTestEnvironment();
::testing::AddGlobalTestEnvironment(gEnv);
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/tests/benchmark/tests/ExtractorTest.cpp b/media/tests/benchmark/tests/ExtractorTest.cpp
index d14d15b..27ee9ba 100644
--- a/media/tests/benchmark/tests/ExtractorTest.cpp
+++ b/media/tests/benchmark/tests/ExtractorTest.cpp
@@ -19,6 +19,8 @@
#include <gtest/gtest.h>
+#include <android/binder_process.h>
+
#include "BenchmarkTestEnvironment.h"
#include "Extractor.h"
@@ -73,6 +75,7 @@
0)));
int main(int argc, char **argv) {
+ ABinderProcess_startThreadPool();
gEnv = new BenchmarkTestEnvironment();
::testing::AddGlobalTestEnvironment(gEnv);
::testing::InitGoogleTest(&argc, argv);
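Both benchmark binaries now start a binder thread pool before running their tests, which pairs with the new libbinder_ndk dependency; presumably this is needed so that incoming binder transactions from the media services can be serviced while a test is blocked. A minimal gtest main with the same shape:

    #include <android/binder_process.h>
    #include <gtest/gtest.h>

    int main(int argc, char **argv) {
        // Spawn threads to service incoming binder transactions (libbinder_ndk).
        ABinderProcess_startThreadPool();
        ::testing::InitGoogleTest(&argc, argv);
        return RUN_ALL_TESTS();
    }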
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 59d74de..819e146 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -39,10 +39,9 @@
static std::atomic<int> curAudioHalPids = 0;
if (update) {
- audioHalPids[(curAudioHalPids + 1) % kNumAudioHalPidsVectors] = *pids;
- curAudioHalPids++;
+ audioHalPids[(curAudioHalPids++ + 1) % kNumAudioHalPidsVectors] = *pids;
} else {
- *pids = audioHalPids[curAudioHalPids];
+ *pids = audioHalPids[curAudioHalPids % kNumAudioHalPidsVectors];
}
}
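The fix folds the slot selection and the index bump into one atomic post-increment on the write side, and bounds the index with a modulo on the read side so it can never run past the array. A standalone sketch of the resulting pattern, using int as a stand-in for pid_t and an illustrative slot count:

    #include <atomic>
    #include <vector>

    constexpr int kNumVectors = 3;

    void getOrSetPids(std::vector<int>* pids, bool update) {
        static std::vector<int> slots[kNumVectors];
        static std::atomic<int> cur{0};
        if (update) {
            // One atomic post-increment picks the slot and bumps the index together.
            slots[(cur++ + 1) % kNumVectors] = *pids;
        } else {
            *pids = slots[cur.load() % kNumVectors];
        }
    }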
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 9770054..d26a601 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -226,10 +226,23 @@
sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
+ // presentationComplete checked by frames. (Mixed Tracks).
// framesWritten is cumulative, never reset, and is shared by all tracks
// audioHalFrames is derived from output latency
- // FIXME parameters not needed, could get them from the thread
bool presentationComplete(int64_t framesWritten, size_t audioHalFrames);
+
+ // presentationComplete checked by time. (Direct Tracks).
+ bool presentationComplete(uint32_t latencyMs);
+
+ void resetPresentationComplete() {
+ mPresentationCompleteFrames = 0;
+ mPresentationCompleteTimeNs = 0;
+ }
+
+ // notifyPresentationComplete is called when presentationComplete() detects
+ // that the track is finished stopping.
+ void notifyPresentationComplete();
+
void signalClientFlag(int32_t flag);
/** Set that a metadata has changed and needs to be notified to backend. Thread safe. */
@@ -262,9 +275,6 @@
int32_t *mAuxBuffer;
int mAuxEffectId;
bool mHasVolumeController;
- size_t mPresentationCompleteFrames; // number of frames written to the
- // audio HAL when this track will be fully rendered
- // zero means not monitoring
// access these three variables only when holding thread lock.
LinearMap<int64_t> mFrameMap; // track frame to server frame mapping
@@ -300,6 +310,14 @@
for (auto& tp : mTeePatches) { f(tp.patchTrack); }
};
+ size_t mPresentationCompleteFrames = 0; // (Used for Mixed tracks)
+ // The number of frames written to the
+ // audio HAL when this track is considered fully rendered.
+ // Zero means not monitoring.
+ int64_t mPresentationCompleteTimeNs = 0; // (Used for Direct tracks)
+ // The time when this track is considered fully rendered.
+ // Zero means not monitoring.
+
// The following fields are only for fast tracks, and should be in a subclass
int mFastIndex; // index within FastMixerState::mFastTracks[];
// either mFastIndex == -1 if not isFastTrack()
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 88c9ba5..61108f3 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -799,7 +799,7 @@
if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT) s.append("front-left, ");
if (mask & AUDIO_CHANNEL_OUT_FRONT_RIGHT) s.append("front-right, ");
if (mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) s.append("front-center, ");
- if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low freq, ");
+ if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) s.append("low-frequency, ");
if (mask & AUDIO_CHANNEL_OUT_BACK_LEFT) s.append("back-left, ");
if (mask & AUDIO_CHANNEL_OUT_BACK_RIGHT) s.append("back-right, ");
if (mask & AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER) s.append("front-left-of-center, ");
@@ -819,7 +819,7 @@
if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_LEFT) s.append("bottom-front-left, ");
if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_CENTER) s.append("bottom-front-center, ");
if (mask & AUDIO_CHANNEL_OUT_BOTTOM_FRONT_RIGHT) s.append("bottom-front-right, ");
- if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) s.append("low_frequency_2, ");
+ if (mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY_2) s.append("low-frequency-2, ");
if (mask & AUDIO_CHANNEL_OUT_HAPTIC_B) s.append("haptic-B, ");
if (mask & AUDIO_CHANNEL_OUT_HAPTIC_A) s.append("haptic-A, ");
if (mask & ~AUDIO_CHANNEL_OUT_ALL) s.append("unknown, ");
@@ -839,7 +839,7 @@
if (mask & AUDIO_CHANNEL_IN_BACK_LEFT) s.append("back-left, ");
if (mask & AUDIO_CHANNEL_IN_BACK_RIGHT) s.append("back-right, ");
if (mask & AUDIO_CHANNEL_IN_CENTER) s.append("center, ");
- if (mask & AUDIO_CHANNEL_IN_LOW_FREQUENCY) s.append("low freq, ");
+ if (mask & AUDIO_CHANNEL_IN_LOW_FREQUENCY) s.append("low-frequency, ");
if (mask & AUDIO_CHANNEL_IN_TOP_LEFT) s.append("top-left, ");
if (mask & AUDIO_CHANNEL_IN_TOP_RIGHT) s.append("top-right, ");
if (mask & AUDIO_CHANNEL_IN_VOICE_UPLINK) s.append("voice-uplink, ");
@@ -2568,7 +2568,7 @@
}
track->mResetDone = false;
- track->mPresentationCompleteFrames = 0;
+ track->resetPresentationComplete();
mActiveTracks.add(track);
sp<EffectChain> chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
@@ -5958,16 +5958,8 @@
track->isStopping_2() || track->isPaused()) {
// We have consumed all the buffers of this track.
// Remove it from the list of active tracks.
- size_t audioHALFrames;
- if (audio_has_proportional_frames(mFormat)) {
- audioHALFrames = (latency_l() * mSampleRate) / 1000;
- } else {
- audioHALFrames = 0;
- }
-
- int64_t framesWritten = mBytesWritten / mFrameSize;
if (mStandby || !last ||
- track->presentationComplete(framesWritten, audioHALFrames) ||
+ track->presentationComplete(latency_l()) ||
track->isPaused() || mHwPaused) {
if (track->isStopping_2()) {
track->mState = TrackBase::STOPPED;
@@ -6541,14 +6533,7 @@
// Drain has completed or we are in standby, signal presentation complete
if (!(mDrainSequence & 1) || !last || mStandby) {
track->mState = TrackBase::STOPPED;
- uint32_t latency = 0;
- status_t result = mOutput->stream->getLatency(&latency);
- ALOGE_IF(result != OK,
- "Error when retrieving output stream latency: %d", result);
- size_t audioHALFrames = (latency * mSampleRate) / 1000;
- int64_t framesWritten =
- mBytesWritten / mOutput->getFrameSize();
- track->presentationComplete(framesWritten, audioHALFrames);
+ track->presentationComplete(latency_l());
track->reset();
tracksToRemove->add(track);
// OFFLOADED stop resets frame counts.
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 4353b3d..e98a1a1 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -632,7 +632,6 @@
mMainBuffer(thread->sinkBuffer()),
mAuxBuffer(NULL),
mAuxEffectId(0), mHasVolumeController(false),
- mPresentationCompleteFrames(0),
mFrameMap(16 /* sink-frame-to-track-frame map memory */),
mVolumeHandler(new media::VolumeHandler(sampleRate)),
mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
@@ -1046,6 +1045,8 @@
reset();
}
+ // clear mPauseHwPending because of pause (and possibly flush) during underrun.
+ mPauseHwPending = false;
if (state == PAUSED || state == PAUSING) {
if (mResumeToStopping) {
// happened we need to resume to STOPPING_1
@@ -1431,6 +1432,7 @@
mAuxBuffer = buffer;
}
+// presentationComplete verified by frames, used by Mixed tracks.
bool AudioFlinger::PlaybackThread::Track::presentationComplete(
int64_t framesWritten, size_t audioHalFrames)
{
@@ -1449,30 +1451,71 @@
(long long)mPresentationCompleteFrames, (long long)framesWritten);
if (mPresentationCompleteFrames == 0) {
mPresentationCompleteFrames = framesWritten + audioHalFrames;
- ALOGV("%s(%d): presentationComplete() reset:"
+ ALOGV("%s(%d): set:"
" mPresentationCompleteFrames %lld audioHalFrames %zu",
__func__, mId,
(long long)mPresentationCompleteFrames, audioHalFrames);
}
bool complete;
- if (isOffloaded()) {
- complete = true;
- } else if (isDirect() || isFastTrack()) { // these do not go through linear map
+ if (isFastTrack()) { // does not go through linear map
complete = framesWritten >= (int64_t) mPresentationCompleteFrames;
+ ALOGV("%s(%d): %s framesWritten:%lld mPresentationCompleteFrames:%lld",
+ __func__, mId, (complete ? "complete" : "waiting"),
+ (long long) framesWritten, (long long) mPresentationCompleteFrames);
} else { // Normal tracks, OutputTracks, and PatchTracks
complete = framesWritten >= (int64_t) mPresentationCompleteFrames
&& mAudioTrackServerProxy->isDrained();
}
if (complete) {
- triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
- mAudioTrackServerProxy->setStreamEndDone();
+ notifyPresentationComplete();
return true;
}
return false;
}
+// presentationComplete checked by time, used by DirectTracks.
+bool AudioFlinger::PlaybackThread::Track::presentationComplete(uint32_t latencyMs)
+{
+ // For Offloaded or Direct tracks.
+
+ // For a direct track, we use a time-based test for presentationComplete.
+
+ // For an offloaded track the HAL+h/w delay is variable so a HAL drain() is used
+ // to detect when all frames have been played. In this case latencyMs isn't
+ // useful because it doesn't always reflect whether there is data in the h/w
+ // buffers, particularly if a track has been paused and resumed during draining.
+
+ // Scaling exists on internal branch.
+ //constexpr float MIN_SPEED = 0.125f; // min speed scaling allowed for timely response.
+ if (mPresentationCompleteTimeNs == 0) {
+ mPresentationCompleteTimeNs = systemTime() + latencyMs * 1e6; // / fmax(mSpeed, MIN_SPEED);
+ ALOGV("%s(%d): set: latencyMs %u mPresentationCompleteTimeNs:%lld",
+ __func__, mId, latencyMs, (long long) mPresentationCompleteTimeNs);
+ }
+
+ bool complete;
+ if (isOffloaded()) {
+ complete = true;
+ } else { // Direct
+ complete = systemTime() >= mPresentationCompleteTimeNs;
+ ALOGV("%s(%d): %s", __func__, mId, (complete ? "complete" : "waiting"));
+ }
+ if (complete) {
+ notifyPresentationComplete();
+ return true;
+ }
+ return false;
+}
+
+void AudioFlinger::PlaybackThread::Track::notifyPresentationComplete()
+{
+ // This only triggers once. TODO: should we enforce this?
+ triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
+ mAudioTrackServerProxy->setStreamEndDone();
+}
+
void AudioFlinger::PlaybackThread::Track::triggerEvents(AudioSystem::sync_event_t type)
{
for (size_t i = 0; i < mSyncEvents.size();) {
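The frame-based overload stays in place for mixed tracks, while the new time-based overload arms a deadline of now + latency once and then only compares the clock against it. A standalone sketch of that check, using clock_gettime as a stand-in for systemTime() (names are illustrative):

    #include <cstdint>
    #include <time.h>

    static int64_t nowNs() {
        timespec ts{};
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (int64_t)ts.tv_sec * 1000000000ll + ts.tv_nsec;
    }

    // Arms the deadline on the first call; later calls only compare against it.
    bool presentationCompleteByTime(int64_t* deadlineNs, uint32_t latencyMs) {
        if (*deadlineNs == 0) {
            *deadlineNs = nowNs() + (int64_t)latencyMs * 1000000ll;
        }
        return nowNs() >= *deadlineNs;
    }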
diff --git a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
index b5885c0..76c35c1 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildPolicyCriterionTypes.py
@@ -200,6 +200,11 @@
#
ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
+ #
+ # Reaching 32 bit limit for inclusive criterion out devices: removing
+ #
+ ignored_output_device_values = ['BleSpeaker', 'BleHeadset']
+
criteria_pattern = re.compile(
r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
@@ -235,7 +240,9 @@
if criterion_name == "OutputDevicesMaskType":
if criterion_literal == "Default":
criterion_numerical_value = str(int("0x40000000", 0))
-
+ if criterion_literal in ignored_output_device_values:
+ logging.info("OutputDevicesMaskType skipping {}".format(criterion_literal))
+ continue
try:
string_int = int(criterion_numerical_value, 0)
except ValueError: