Merge "libeffects: Replace LoadConst_Float with memset for 0 loading."
diff --git a/apex/Android.bp b/apex/Android.bp
index 4d194bd..dabf4c2 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -64,6 +64,9 @@
// - build artifacts (lib/javalib/bin) against Android 10 SDK
// so that the artifacts can run.
min_sdk_version: "29",
+ // Indicates that the pre-installed version of this apex can be compressed.
+ // Whether it actually will be compressed is controlled on a per-device basis.
+ compressible: true,
}
apex {
@@ -120,6 +123,9 @@
// - build artifacts (lib/javalib/bin) against Android 10 SDK
// so that the artifacts can run.
min_sdk_version: "29",
+ // Indicates that the pre-installed version of this apex can be compressed.
+ // Whether it actually will be compressed is controlled on a per-device basis.
+ compressible: true,
}
prebuilt_etc {
diff --git a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
index a537e63..7c6d86c 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
+++ b/drm/mediacas/plugins/clearkey/ClearKeySessionLibrary.h
@@ -22,7 +22,6 @@
#include <openssl/aes.h>
#include <utils/KeyedVector.h>
#include <utils/Mutex.h>
-#include <utils/RefBase.h>
namespace android {
struct ABuffer;
@@ -30,7 +29,7 @@
namespace clearkeycas {
class KeyFetcher;
-class ClearKeyCasSession : public RefBase {
+class ClearKeyCasSession {
public:
explicit ClearKeyCasSession(CasPlugin *plugin);
diff --git a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
index 1b8b8c1..6ac3510 100644
--- a/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/default/DrmPlugin.cpp
@@ -109,6 +109,7 @@
}
void DrmPlugin::setPlayPolicy() {
+ android::Mutex::Autolock lock(mPlayPolicyLock);
mPlayPolicy.clear();
mPlayPolicy.add(kQueryKeyLicenseType, kStreaming);
mPlayPolicy.add(kQueryKeyPlayAllowed, kTrue);
diff --git a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
index 4fa42e5..aa9b59d 100644
--- a/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/default/include/DrmPlugin.h
@@ -262,6 +262,7 @@
void initProperties();
void setPlayPolicy();
+ android::Mutex mPlayPolicyLock;
android::KeyedVector<String8, String8> mPlayPolicy;
android::KeyedVector<String8, String8> mStringProperties;
android::KeyedVector<String8, Vector<uint8_t>> mByteArrayProperties;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
index 1495703..d278633 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/CryptoPlugin.cpp
@@ -119,7 +119,11 @@
return Void();
}
- if (source.offset + offset + source.size > sourceBase->getSize()) {
+ size_t totalSize = 0;
+ if (__builtin_add_overflow(source.offset, offset, &totalSize) ||
+ __builtin_add_overflow(totalSize, source.size, &totalSize) ||
+ totalSize > sourceBase->getSize()) {
+ android_errorWriteLog(0x534e4554, "176496160");
_hidl_cb(Status_V1_2::ERROR_DRM_CANNOT_HANDLE, 0, "invalid buffer size");
return Void();
}
diff --git a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
index f87f830..a77759e 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/hidl/DrmPlugin.cpp
@@ -304,6 +304,7 @@
}
void DrmPlugin::setPlayPolicy() {
+ android::Mutex::Autolock lock(mPlayPolicyLock);
mPlayPolicy.clear();
KeyValue policy;
diff --git a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
index 3de7589..076beb8 100644
--- a/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/hidl/include/DrmPlugin.h
@@ -406,6 +406,7 @@
int64_t mCloseSessionOkCount;
int64_t mCloseSessionNotOpenedCount;
uint32_t mNextSecureStopId;
+ android::Mutex mPlayPolicyLock;
// set by property to mock error scenarios
Status_V1_2 mMockError;
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 9cabd8b..200e92d 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -182,6 +182,7 @@
// This is set by AudioTrack.setBufferSizeInFrames().
// A write will not fill the buffer above this limit.
volatile uint32_t mBufferSizeInFrames; // effective size of the buffer
+ volatile uint32_t mStartThresholdInFrames; // min frames in buffer to start streaming
public:
@@ -216,6 +217,8 @@
};
size_t frameCount() const { return mFrameCount; }
+ uint32_t getStartThresholdInFrames() const;
+ uint32_t setStartThresholdInFrames(uint32_t startThresholdInFrames);
protected:
// These refer to shared memory, and are virtual addresses with respect to the current process.
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 3e6b0ff..332696d 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -55,6 +55,8 @@
namespace android {
constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+constexpr size_t kDefaultOutputPortDelay = 2;
+constexpr size_t kMaxOutputPortDelay = 16;
class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
@@ -73,7 +75,9 @@
addParameter(
DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
- .withConstValue(new C2PortActualDelayTuning::output(2u))
+ .withDefault(new C2PortActualDelayTuning::output(kDefaultOutputPortDelay))
+ .withFields({C2F(mActualOutputDelay, value).inRange(0, kMaxOutputPortDelay)})
+ .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
.build());
addParameter(
@@ -263,6 +267,7 @@
mAACDecoder(nullptr),
mStreamInfo(nullptr),
mSignalledError(false),
+ mOutputPortDelay(kDefaultOutputPortDelay),
mOutputDelayRingBuffer(nullptr) {
}
@@ -915,6 +920,29 @@
int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+ size_t numSamplesInOutput = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (numSamplesInOutput > 0) {
+ size_t actualOutputPortDelay = (outputDelay + numSamplesInOutput - 1) / numSamplesInOutput;
+ if (actualOutputPortDelay > mOutputPortDelay) {
+ mOutputPortDelay = actualOutputPortDelay;
+ ALOGV("New Output port delay %zu ", mOutputPortDelay);
+
+ C2PortActualDelayTuning::output outputPortDelay(mOutputPortDelay);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err =
+ mIntf->config({&outputPortDelay}, C2_MAY_BLOCK, &failures);
+ if (err == OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(outputPortDelay));
+ } else {
+ ALOGE("Cannot set output delay");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ }
mBuffersInfo.push_back(std::move(inInfo));
work->workletsProcessed = 0u;
if (!eos && mOutputDelayCompensated < outputDelay) {
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index 965c29e..986187c 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -57,6 +57,7 @@
size_t mInputBufferCount;
size_t mOutputBufferCount;
bool mSignalledError;
+ size_t mOutputPortDelay;
struct Info {
uint64_t frameIndex;
size_t bufferSize;
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index ea76cbb..d865ab2 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -272,8 +272,9 @@
return UNKNOWN_ERROR;
}
- if (sbrMode != -1 && aacProfile == C2Config::PROFILE_AAC_ELD) {
- if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, sbrMode)) {
+ if (sbrMode != C2Config::AAC_SBR_AUTO && aacProfile == C2Config::PROFILE_AAC_ELD) {
+ int aacSbrMode = sbrMode != C2Config::AAC_SBR_OFF;
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, aacSbrMode)) {
ALOGE("Failed to set AAC encoder parameters");
return UNKNOWN_ERROR;
}
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 0207311..e8287f9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -26,7 +26,6 @@
#include <SimpleC2Interface.h>
#include "C2SoftAvcDec.h"
-#include "ih264d.h"
namespace android {
@@ -391,12 +390,14 @@
}
while (true) {
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
+ ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+ ih264d_video_decode_op_t s_h264d_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
- setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- if (0 == s_decode_op.u4_output_present) {
+ setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+ if (0 == ps_decode_op->u4_output_present) {
resetPlugin();
break;
}
@@ -411,8 +412,8 @@
}
status_t C2SoftAvcDec::createDecoder() {
- ivdext_create_ip_t s_create_ip;
- ivdext_create_op_t s_create_op;
+ ivdext_create_ip_t s_create_ip = {};
+ ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -438,8 +439,8 @@
}
status_t C2SoftAvcDec::setNumCores() {
- ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
- ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -458,22 +459,26 @@
}
status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
- ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
- ivd_ctl_set_config_op_t s_set_dyn_params_op;
+ ih264d_ctl_set_config_ip_t s_h264d_set_dyn_params_ip = {};
+ ih264d_ctl_set_config_op_t s_h264d_set_dyn_params_op = {};
+ ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+ &s_h264d_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+ ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+ &s_h264d_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
- s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
- s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
- s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
- s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
- s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
- s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
- s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
- s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ ps_set_dyn_params_ip->u4_size = sizeof(ih264d_ctl_set_config_ip_t);
+ ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+ ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+ ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+ ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+ ps_set_dyn_params_op->u4_size = sizeof(ih264d_ctl_set_config_op_t);
IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
- &s_set_dyn_params_ip,
- &s_set_dyn_params_op);
+ &s_h264d_set_dyn_params_ip,
+ &s_h264d_set_dyn_params_op);
if (status != IV_SUCCESS) {
- ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
return UNKNOWN_ERROR;
}
@@ -481,8 +486,8 @@
}
void C2SoftAvcDec::getVersion() {
- ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
- ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
UWORD8 au1_buf[512];
s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -538,7 +543,7 @@
if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
}
- ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->u4_size = sizeof(ih264d_video_decode_ip_t);
ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
if (inBuffer) {
ps_decode_ip->u4_ts = tsMarker;
@@ -567,14 +572,14 @@
ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
}
ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
- ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+ ps_decode_op->u4_size = sizeof(ih264d_video_decode_op_t);
return true;
}
bool C2SoftAvcDec::getVuiParams() {
- ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
- ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+ ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+ ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -622,8 +627,8 @@
}
status_t C2SoftAvcDec::setFlushMode() {
- ivd_ctl_flush_ip_t s_set_flush_ip;
- ivd_ctl_flush_op_t s_set_flush_op;
+ ivd_ctl_flush_ip_t s_set_flush_ip = {};
+ ivd_ctl_flush_op_t s_set_flush_op = {};
s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -641,8 +646,8 @@
}
status_t C2SoftAvcDec::resetDecoder() {
- ivd_ctl_reset_ip_t s_reset_ip;
- ivd_ctl_reset_op_t s_reset_op;
+ ivd_ctl_reset_ip_t s_reset_ip = {};
+ ivd_ctl_reset_op_t s_reset_op = {};
s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -671,8 +676,8 @@
status_t C2SoftAvcDec::deleteDecoder() {
if (mDecHandle) {
- ivdext_delete_ip_t s_delete_ip;
- ivdext_delete_op_t s_delete_op;
+ ivdext_delete_ip_t s_delete_ip = {};
+ ivdext_delete_op_t s_delete_op = {};
s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -837,8 +842,10 @@
return;
}
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
+ ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+ ih264d_video_decode_op_t s_h264d_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
{
C2GraphicView wView = mOutBlock->map().get();
if (wView.error()) {
@@ -846,7 +853,7 @@
work->result = wView.error();
return;
}
- if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
inOffset + inPos, inSize - inPos, workIndex)) {
mSignalledError = true;
work->workletsProcessed = 1u;
@@ -862,26 +869,27 @@
WORD32 delay;
GETTIME(&mTimeStart, nullptr);
TIME_DIFF(mTimeEnd, mTimeStart, delay);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
WORD32 decodeTime;
GETTIME(&mTimeEnd, nullptr);
TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
- s_decode_op.u4_num_bytes_consumed);
+ ps_decode_op->u4_num_bytes_consumed);
}
- if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGE("allocation failure in decoder");
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
- } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
+ (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
- } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGV("resolution changed");
drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
resetDecoder();
@@ -890,16 +898,16 @@
/* Decode header and get new dimensions */
setParams(mStride, IVD_DECODE_HEADER);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
- ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+ (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+ } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+ ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
}
- if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
- mOutputDelay = s_decode_op.i4_reorder_depth;
+ if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+ mOutputDelay = ps_decode_op->i4_reorder_depth;
ALOGV("New Output delay %d ", mOutputDelay);
C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -917,16 +925,16 @@
return;
}
}
- if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- mStride = ALIGN32(s_decode_op.u4_pic_wd);
+ mStride = ALIGN32(ps_decode_op->u4_pic_wd);
setParams(mStride, IVD_DECODE_FRAME);
}
- if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
- mWidth = s_decode_op.u4_pic_wd;
- mHeight = s_decode_op.u4_pic_ht;
- CHECK_EQ(0u, s_decode_op.u4_output_present);
+ if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
+ mWidth = ps_decode_op->u4_pic_wd;
+ mHeight = ps_decode_op->u4_pic_ht;
+ CHECK_EQ(0u, ps_decode_op->u4_output_present);
C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -945,11 +953,11 @@
}
}
(void)getVuiParams();
- hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
- if (s_decode_op.u4_output_present) {
- finishWork(s_decode_op.u4_ts, work);
+ hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+ if (ps_decode_op->u4_output_present) {
+ finishWork(ps_decode_op->u4_ts, work);
}
- inPos += s_decode_op.u4_num_bytes_consumed;
+ inPos += ps_decode_op->u4_num_bytes_consumed;
}
if (eos) {
drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -987,16 +995,18 @@
ALOGE("graphic view map failed %d", wView.error());
return C2_CORRUPTED;
}
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
- if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ ih264d_video_decode_ip_t s_h264d_decode_ip = {};
+ ih264d_video_decode_op_t s_h264d_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_h264d_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_h264d_decode_op.s_ivd_video_decode_op_t;
+ if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
mSignalledError = true;
work->workletsProcessed = 1u;
return C2_CORRUPTED;
}
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- if (s_decode_op.u4_output_present) {
- finishWork(s_decode_op.u4_ts, work);
+ (void) ivdec_api_function(mDecHandle, &s_h264d_decode_ip, &s_h264d_decode_op);
+ if (ps_decode_op->u4_output_present) {
+ finishWork(ps_decode_op->u4_ts, work);
} else {
fillEmptyWork(work);
break;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
index bd84de0..5c07d29 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.h
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -25,8 +25,7 @@
#include <SimpleC2Component.h>
#include "ih264_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ih264d.h"
namespace android {
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index cfaeb66..fc5b75d 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -19,6 +19,8 @@
#include <log/log.h>
#include <utils/misc.h>
+#include <algorithm>
+
#include <media/hardware/VideoAPI.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
@@ -121,6 +123,19 @@
.build());
addParameter(
+ DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
+ .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
+ 0 /* flexCount */, 0u /* stream */))
+ .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
+ {C2Config::picture_type_t(I_FRAME),
+ C2Config::picture_type_t(P_FRAME),
+ C2Config::picture_type_t(B_FRAME)}),
+ C2F(mPictureQuantization, m.values[0].min).any(),
+ C2F(mPictureQuantization, m.values[0].max).any()})
+ .withSetter(PictureQuantizationSetter)
+ .build());
+
+ addParameter(
DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
.withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
.withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
@@ -220,6 +235,7 @@
return res;
}
+
static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
C2P<C2StreamPictureSizeInfo::input> &me) {
(void)mayBlock;
@@ -336,6 +352,13 @@
return C2R::Ok();
}
+ static C2R PictureQuantizationSetter(bool mayBlock,
+ C2P<C2StreamPictureQuantizationTuning::output> &me) {
+ (void)mayBlock;
+ (void)me;
+ return C2R::Ok();
+ }
+
IV_PROFILE_T getProfile_l() const {
switch (mProfileLevel->profile) {
case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
@@ -393,6 +416,8 @@
std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
+ { return mPictureQuantization; }
private:
std::shared_ptr<C2StreamUsageTuning::input> mUsage;
@@ -404,6 +429,7 @@
std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
std::shared_ptr<C2StreamGopTuning::output> mGop;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
};
#define ive_api_function ih264e_api_function
@@ -428,11 +454,19 @@
} // namespace
+static IV_COLOR_FORMAT_T GetIvColorFormat() {
+ static IV_COLOR_FORMAT_T sColorFormat =
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_UV) ? IV_YUV_420SP_UV :
+ (GetYuv420FlexibleLayout() == FLEX_LAYOUT_SEMIPLANAR_VU) ? IV_YUV_420SP_VU :
+ IV_YUV_420P;
+ return sColorFormat;
+}
+
C2SoftAvcEnc::C2SoftAvcEnc(
const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
: SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
mIntf(intfImpl),
- mIvVideoColorFormat(IV_YUV_420P),
+ mIvVideoColorFormat(GetIvColorFormat()),
mAVCEncProfile(IV_PROFILE_BASE),
mAVCEncLevel(41),
mStarted(false),
@@ -664,20 +698,67 @@
ive_ctl_set_qp_op_t s_qp_op;
IV_STATUS_T status;
+ ALOGV("in setQp()");
+
+ // set the defaults
s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
- s_qp_ip.u4_i_qp = DEFAULT_I_QP;
- s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
- s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+ // these are the ones we're going to set, so want them to default ....
+ // to the DEFAULT values for the codec instead of CODEC_ bounding
+ int32_t iMin = INT32_MIN, pMin = INT32_MIN, bMin = INT32_MIN;
+ int32_t iMax = INT32_MAX, pMax = INT32_MAX, bMax = INT32_MAX;
- s_qp_ip.u4_p_qp = DEFAULT_P_QP;
- s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
- s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+ std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
+ mIntf->getPictureQuantization_l();
+ for (size_t i = 0; i < qp->flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = qp->m.values[i];
- s_qp_ip.u4_b_qp = DEFAULT_P_QP;
- s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
- s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layer.max;
+ iMin = layer.min;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layer.max;
+ pMin = layer.min;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+ bMax = layer.max;
+ bMin = layer.min;
+ ALOGV("bMin %d bMax %d", bMin, bMax);
+ }
+ }
+
+ // INT32_{MIN,MAX} means unspecified, so use the codec's default
+ if (iMax == INT32_MAX) iMax = DEFAULT_I_QP_MAX;
+ if (iMin == INT32_MIN) iMin = DEFAULT_I_QP_MIN;
+ if (pMax == INT32_MAX) pMax = DEFAULT_P_QP_MAX;
+ if (pMin == INT32_MIN) pMin = DEFAULT_P_QP_MIN;
+ if (bMax == INT32_MAX) bMax = DEFAULT_B_QP_MAX;
+ if (bMin == INT32_MIN) bMin = DEFAULT_B_QP_MIN;
+
+ // ensure we have legal values
+ iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+ pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+ bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+ s_qp_ip.u4_i_qp_max = iMax;
+ s_qp_ip.u4_i_qp_min = iMin;
+ s_qp_ip.u4_p_qp_max = pMax;
+ s_qp_ip.u4_p_qp_min = pMin;
+ s_qp_ip.u4_b_qp_max = bMax;
+ s_qp_ip.u4_b_qp_min = bMin;
+
+ // ensure initial qp values are within our newly configured bounds...
+ s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
+ s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
+ s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
+
+ ALOGV("setting QP: i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+
s_qp_ip.u4_timestamp_high = -1;
s_qp_ip.u4_timestamp_low = -1;
@@ -953,8 +1034,7 @@
// Assume worst case output buffer size to be equal to number of bytes in input
mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
- // TODO
- mIvVideoColorFormat = IV_YUV_420P;
+ mIvVideoColorFormat = GetIvColorFormat();
ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
@@ -1009,29 +1089,31 @@
/* Getting MemRecords Attributes */
{
- iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
- iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+ ih264e_fill_mem_rec_ip_t s_ih264e_mem_rec_ip = {};
+ ih264e_fill_mem_rec_op_t s_ih264e_mem_rec_op = {};
+ iv_fill_mem_rec_ip_t *ps_fill_mem_rec_ip = &s_ih264e_mem_rec_ip.s_ive_ip;
+ iv_fill_mem_rec_op_t *ps_fill_mem_rec_op = &s_ih264e_mem_rec_op.s_ive_op;
- s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
- s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+ ps_fill_mem_rec_ip->u4_size = sizeof(ih264e_fill_mem_rec_ip_t);
+ ps_fill_mem_rec_op->u4_size = sizeof(ih264e_fill_mem_rec_op_t);
- s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
- s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
- s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
- s_fill_mem_rec_ip.u4_max_wd = width;
- s_fill_mem_rec_ip.u4_max_ht = height;
- s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
- s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
- s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
- s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
- s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
- s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ ps_fill_mem_rec_ip->e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ ps_fill_mem_rec_ip->ps_mem_rec = mMemRecords;
+ ps_fill_mem_rec_ip->u4_num_mem_rec = mNumMemRecords;
+ ps_fill_mem_rec_ip->u4_max_wd = width;
+ ps_fill_mem_rec_ip->u4_max_ht = height;
+ ps_fill_mem_rec_ip->u4_max_level = mAVCEncLevel;
+ ps_fill_mem_rec_ip->e_color_format = DEFAULT_INP_COLOR_FORMAT;
+ ps_fill_mem_rec_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ ps_fill_mem_rec_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ ps_fill_mem_rec_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ ps_fill_mem_rec_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
- status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+ status = ive_api_function(nullptr, &s_ih264e_mem_rec_ip, &s_ih264e_mem_rec_op);
if (status != IV_SUCCESS) {
ALOGE("Fill memory records failed = 0x%x\n",
- s_fill_mem_rec_op.u4_error_code);
+ ps_fill_mem_rec_op->u4_error_code);
return C2_CORRUPTED;
}
}
@@ -1060,48 +1142,51 @@
/* Codec Instance Creation */
{
- ive_init_ip_t s_init_ip;
- ive_init_op_t s_init_op;
+ ih264e_init_ip_t s_enc_ip = {};
+ ih264e_init_op_t s_enc_op = {};
+
+ ive_init_ip_t *ps_init_ip = &s_enc_ip.s_ive_ip;
+ ive_init_op_t *ps_init_op = &s_enc_op.s_ive_op;
mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
mCodecCtx->u4_size = sizeof(iv_obj_t);
mCodecCtx->pv_fxns = (void *)ive_api_function;
- s_init_ip.u4_size = sizeof(ive_init_ip_t);
- s_init_op.u4_size = sizeof(ive_init_op_t);
+ ps_init_ip->u4_size = sizeof(ih264e_init_ip_t);
+ ps_init_op->u4_size = sizeof(ih264e_init_op_t);
- s_init_ip.e_cmd = IV_CMD_INIT;
- s_init_ip.u4_num_mem_rec = mNumMemRecords;
- s_init_ip.ps_mem_rec = mMemRecords;
- s_init_ip.u4_max_wd = width;
- s_init_ip.u4_max_ht = height;
- s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
- s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
- s_init_ip.u4_max_level = mAVCEncLevel;
- s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+ ps_init_ip->e_cmd = IV_CMD_INIT;
+ ps_init_ip->u4_num_mem_rec = mNumMemRecords;
+ ps_init_ip->ps_mem_rec = mMemRecords;
+ ps_init_ip->u4_max_wd = width;
+ ps_init_ip->u4_max_ht = height;
+ ps_init_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ ps_init_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ ps_init_ip->u4_max_level = mAVCEncLevel;
+ ps_init_ip->e_inp_color_fmt = mIvVideoColorFormat;
if (mReconEnable || mPSNREnable) {
- s_init_ip.u4_enable_recon = 1;
+ ps_init_ip->u4_enable_recon = 1;
} else {
- s_init_ip.u4_enable_recon = 0;
+ ps_init_ip->u4_enable_recon = 0;
}
- s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
- s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
- s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
- s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
- s_init_ip.u4_num_bframes = mBframes;
- s_init_ip.e_content_type = IV_PROGRESSIVE;
- s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
- s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
- s_init_ip.e_slice_mode = mSliceMode;
- s_init_ip.u4_slice_param = mSliceParam;
- s_init_ip.e_arch = mArch;
- s_init_ip.e_soc = DEFAULT_SOC;
+ ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+ ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
+ ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+ ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
+ ps_init_ip->u4_num_bframes = mBframes;
+ ps_init_ip->e_content_type = IV_PROGRESSIVE;
+ ps_init_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ ps_init_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ ps_init_ip->e_slice_mode = mSliceMode;
+ ps_init_ip->u4_slice_param = mSliceParam;
+ ps_init_ip->e_arch = mArch;
+ ps_init_ip->e_soc = DEFAULT_SOC;
- status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+ status = ive_api_function(mCodecCtx, &s_enc_ip, &s_enc_op);
if (status != IV_SUCCESS) {
- ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+ ALOGE("Init encoder failed = 0x%x\n", ps_init_op->u4_error_code);
return C2_CORRUPTED;
}
}
@@ -1247,7 +1332,6 @@
mSize->width, input->height(), mSize->height);
return C2_BAD_VALUE;
}
- ALOGV("width = %d, height = %d", input->width(), input->height());
const C2PlanarLayout &layout = input->layout();
uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
@@ -1284,7 +1368,8 @@
return C2_BAD_VALUE;
}
- if (layout.planes[layout.PLANE_Y].colInc == 1
+ if (mIvVideoColorFormat == IV_YUV_420P
+ && layout.planes[layout.PLANE_Y].colInc == 1
&& layout.planes[layout.PLANE_U].colInc == 1
&& layout.planes[layout.PLANE_V].colInc == 1
&& uStride == vStride
@@ -1292,21 +1377,61 @@
// I420 compatible - already set up above
break;
}
+ if (mIvVideoColorFormat == IV_YUV_420SP_UV
+ && layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && uStride == vStride
+ && yStride == vStride
+ && uPlane + 1 == vPlane) {
+ // NV12 compatible - already set up above
+ break;
+ }
+ if (mIvVideoColorFormat == IV_YUV_420SP_VU
+ && layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && uStride == vStride
+ && yStride == vStride
+ && uPlane == vPlane + 1) {
+ // NV21 compatible - already set up above
+ break;
+ }
// copy to I420
yStride = width;
uStride = vStride = yStride / 2;
MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
- MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ MediaImage2 img;
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+ case IV_YUV_420SP_VU:
+ img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+ img.mPlane[MediaImage2::U].mOffset++;
+ img.mPlane[MediaImage2::V].mOffset--;
+ yPlane = conversionBuffer.data();
+ vPlane = yPlane + yPlaneSize;
+ uPlane = vPlane + 1;
+ break;
+ case IV_YUV_420SP_UV:
+ default:
+ img = CreateYUV420SemiPlanarMediaImage2(width, height, yStride, height);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + 1;
+ break;
+ }
status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
if (err != OK) {
ALOGE("Buffer conversion failed: %d", err);
return C2_BAD_VALUE;
}
- yPlane = conversionBuffer.data();
- uPlane = yPlane + yPlaneSize;
- vPlane = uPlane + yPlaneSize / 4;
break;
}
@@ -1328,13 +1453,13 @@
ps_inp_raw_buf->apv_bufs[1] = uPlane;
ps_inp_raw_buf->apv_bufs[2] = vPlane;
- ps_inp_raw_buf->au4_wd[0] = input->width();
- ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
- ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+ ps_inp_raw_buf->au4_wd[0] = mSize->width;
+ ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
+ ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
- ps_inp_raw_buf->au4_ht[0] = input->height();
- ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
- ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[0] = mSize->height;
+ ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
+ ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
ps_inp_raw_buf->au4_strd[0] = yStride;
ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1352,18 +1477,20 @@
break;
}
- case IV_YUV_420SP_UV:
case IV_YUV_420SP_VU:
+ uPlane = vPlane;
+ [[fallthrough]];
+ case IV_YUV_420SP_UV:
default:
{
ps_inp_raw_buf->apv_bufs[0] = yPlane;
ps_inp_raw_buf->apv_bufs[1] = uPlane;
- ps_inp_raw_buf->au4_wd[0] = input->width();
- ps_inp_raw_buf->au4_wd[1] = input->width();
+ ps_inp_raw_buf->au4_wd[0] = mSize->width;
+ ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
- ps_inp_raw_buf->au4_ht[0] = input->height();
- ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[0] = mSize->height;
+ ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
ps_inp_raw_buf->au4_strd[0] = yStride;
ps_inp_raw_buf->au4_strd[1] = uStride;
@@ -1429,15 +1556,17 @@
}
// while (!mSawOutputEOS && !outQueue.empty()) {
c2_status_t error;
- ive_video_encode_ip_t s_encode_ip;
- ive_video_encode_op_t s_encode_op;
- memset(&s_encode_op, 0, sizeof(s_encode_op));
+ ih264e_video_encode_ip_t s_video_encode_ip = {};
+ ih264e_video_encode_op_t s_video_encode_op = {};
+ ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+ ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+ memset(ps_encode_op, 0, sizeof(*ps_encode_op));
if (!mSpsPpsHeaderReceived) {
constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
uint8_t header[kHeaderLength];
error = setEncodeArgs(
- &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, workIndex);
+ ps_encode_ip, ps_encode_op, nullptr, header, kHeaderLength, workIndex);
if (error != C2_OK) {
ALOGE("setEncodeArgs failed: %d", error);
mSignalledError = true;
@@ -1445,22 +1574,22 @@
work->workletsProcessed = 1u;
return;
}
- status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+ status = ive_api_function(mCodecCtx, ps_encode_ip, ps_encode_op);
if (IV_SUCCESS != status) {
ALOGE("Encode header failed = 0x%x\n",
- s_encode_op.u4_error_code);
+ ps_encode_op->u4_error_code);
work->workletsProcessed = 1u;
return;
} else {
ALOGV("Bytes Generated in header %d\n",
- s_encode_op.s_out_buf.u4_bytes);
+ ps_encode_op->s_out_buf.u4_bytes);
}
mSpsPpsHeaderReceived = true;
std::unique_ptr<C2StreamInitDataInfo::output> csd =
- C2StreamInitDataInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+ C2StreamInitDataInfo::output::AllocUnique(ps_encode_op->s_out_buf.u4_bytes, 0u);
if (!csd) {
ALOGE("CSD allocation failed");
mSignalledError = true;
@@ -1468,7 +1597,7 @@
work->workletsProcessed = 1u;
return;
}
- memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+ memcpy(csd->m.value, header, ps_encode_op->s_out_buf.u4_bytes);
work->worklets.front()->output.configUpdate.push_back(std::move(csd));
DUMP_TO_FILE(
@@ -1562,7 +1691,7 @@
}
error = setEncodeArgs(
- &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
+ ps_encode_ip, ps_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
if (error != C2_OK) {
ALOGE("setEncodeArgs failed : %d", error);
mSignalledError = true;
@@ -1579,17 +1708,17 @@
/* Compute time elapsed between end of previous decode()
* to start of current decode() */
TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
- status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+ status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
if (IV_SUCCESS != status) {
- if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+ if ((ps_encode_op->u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
// TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
mOutBufferSize *= 2;
mOutBlock.reset();
continue;
}
ALOGE("Encode Frame failed = 0x%x\n",
- s_encode_op.u4_error_code);
+ ps_encode_op->u4_error_code);
mSignalledError = true;
work->result = C2_CORRUPTED;
work->workletsProcessed = 1u;
@@ -1599,7 +1728,7 @@
// Hold input buffer reference
if (inputBuffer) {
- mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+ mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
}
GETTIME(&mTimeEnd, nullptr);
@@ -1607,9 +1736,9 @@
TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
- s_encode_op.s_out_buf.u4_bytes);
+ ps_encode_op->s_out_buf.u4_bytes);
- void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
/* If encoder frees up an input buffer, mark it as free */
if (freed != nullptr) {
if (mBuffers.count(freed) == 0u) {
@@ -1621,17 +1750,17 @@
}
}
- if (s_encode_op.output_present) {
- if (!s_encode_op.s_out_buf.u4_bytes) {
+ if (ps_encode_op->output_present) {
+ if (!ps_encode_op->s_out_buf.u4_bytes) {
ALOGE("Error: Output present but bytes generated is zero");
mSignalledError = true;
work->result = C2_CORRUPTED;
work->workletsProcessed = 1u;
return;
}
- uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
- s_encode_op.u4_timestamp_low;
- finishWork(workId, work, &s_encode_op);
+ uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+ ps_encode_op->u4_timestamp_low;
+ finishWork(workId, work, ps_encode_op);
}
if (mSawInputEOS) {
drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
@@ -1671,9 +1800,11 @@
ALOGE("graphic view map failed %d", wView.error());
return C2_CORRUPTED;
}
- ive_video_encode_ip_t s_encode_ip;
- ive_video_encode_op_t s_encode_op;
- if (C2_OK != setEncodeArgs(&s_encode_ip, &s_encode_op, nullptr,
+ ih264e_video_encode_ip_t s_video_encode_ip = {};
+ ih264e_video_encode_op_t s_video_encode_op = {};
+ ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
+ ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
+ if (C2_OK != setEncodeArgs(ps_encode_ip, ps_encode_op, nullptr,
wView.base(), wView.capacity(), 0)) {
ALOGE("setEncodeArgs failed for drainInternal");
mSignalledError = true;
@@ -1681,9 +1812,9 @@
work->workletsProcessed = 1u;
return C2_CORRUPTED;
}
- (void)ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+ (void)ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
- void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
/* If encoder frees up an input buffer, mark it as free */
if (freed != nullptr) {
if (mBuffers.count(freed) == 0u) {
@@ -1695,10 +1826,10 @@
}
}
- if (s_encode_op.output_present) {
- uint64_t workId = ((uint64_t)s_encode_op.u4_timestamp_high << 32) |
- s_encode_op.u4_timestamp_low;
- finishWork(workId, work, &s_encode_op);
+ if (ps_encode_op->output_present) {
+ uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
+ ps_encode_op->u4_timestamp_low;
+ finishWork(workId, work, ps_encode_op);
} else {
if (work->workletsProcessed != 1u) {
work->worklets.front()->output.flags = work->input.flags;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index 555055b..673a282 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -24,8 +24,7 @@
#include <SimpleC2Component.h>
#include "ih264_typedefs.h"
-#include "iv2.h"
-#include "ive2.h"
+#include "ih264e.h"
namespace android {
@@ -100,6 +99,11 @@
#define STRLENGTH 500
#define DEFAULT_CONSTRAINED_INTRA 0
+/** limits as specified by h264 */
+#define CODEC_QP_MIN 0
+#define CODEC_QP_MAX 51
+
+
#define MIN(a, b) ((a) < (b))? (a) : (b)
#define MAX(a, b) ((a) > (b))? (a) : (b)
#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a374dfa..6bcf3a2 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -26,7 +26,6 @@
#include <SimpleC2Interface.h>
#include "C2SoftHevcDec.h"
-#include "ihevcd_cxa.h"
namespace android {
@@ -380,12 +379,14 @@
}
while (true) {
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
+ ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+ ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
- setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- if (0 == s_decode_op.u4_output_present) {
+ setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+ if (0 == ps_decode_op->u4_output_present) {
resetPlugin();
break;
}
@@ -400,8 +401,8 @@
}
status_t C2SoftHevcDec::createDecoder() {
- ivdext_create_ip_t s_create_ip;
- ivdext_create_op_t s_create_op;
+ ivdext_create_ip_t s_create_ip = {};
+ ivdext_create_op_t s_create_op = {};
s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
@@ -427,8 +428,8 @@
}
status_t C2SoftHevcDec::setNumCores() {
- ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
- ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip = {};
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op = {};
s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -447,22 +448,26 @@
}
status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
- ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
- ivd_ctl_set_config_op_t s_set_dyn_params_op;
+ ihevcd_cxa_ctl_set_config_ip_t s_hevcd_set_dyn_params_ip = {};
+ ihevcd_cxa_ctl_set_config_op_t s_hevcd_set_dyn_params_op = {};
+ ivd_ctl_set_config_ip_t *ps_set_dyn_params_ip =
+ &s_hevcd_set_dyn_params_ip.s_ivd_ctl_set_config_ip_t;
+ ivd_ctl_set_config_op_t *ps_set_dyn_params_op =
+ &s_hevcd_set_dyn_params_op.s_ivd_ctl_set_config_op_t;
- s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
- s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
- s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
- s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
- s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
- s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
- s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
- s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ ps_set_dyn_params_ip->u4_size = sizeof(ihevcd_cxa_ctl_set_config_ip_t);
+ ps_set_dyn_params_ip->e_cmd = IVD_CMD_VIDEO_CTL;
+ ps_set_dyn_params_ip->e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ ps_set_dyn_params_ip->u4_disp_wd = (UWORD32) stride;
+ ps_set_dyn_params_ip->e_frm_skip_mode = IVD_SKIP_NONE;
+ ps_set_dyn_params_ip->e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ ps_set_dyn_params_ip->e_vid_dec_mode = dec_mode;
+ ps_set_dyn_params_op->u4_size = sizeof(ihevcd_cxa_ctl_set_config_op_t);
IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
- &s_set_dyn_params_ip,
- &s_set_dyn_params_op);
+ ps_set_dyn_params_ip,
+ ps_set_dyn_params_op);
if (status != IV_SUCCESS) {
- ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ ALOGE("error in %s: 0x%x", __func__, ps_set_dyn_params_op->u4_error_code);
return UNKNOWN_ERROR;
}
@@ -470,8 +475,8 @@
}
status_t C2SoftHevcDec::getVersion() {
- ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
- ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip = {};
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op = {};
UWORD8 au1_buf[512];
s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
@@ -529,7 +534,7 @@
if (OK != setParams(mStride, IVD_DECODE_FRAME)) return false;
}
- ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->u4_size = sizeof(ihevcd_cxa_video_decode_ip_t);
ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
if (inBuffer) {
ps_decode_ip->u4_ts = tsMarker;
@@ -558,15 +563,15 @@
ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
}
ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
- ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+ ps_decode_op->u4_size = sizeof(ihevcd_cxa_video_decode_op_t);
ps_decode_op->u4_output_present = 0;
return true;
}
bool C2SoftHevcDec::getVuiParams() {
- ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
- ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+ ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip = {};
+ ivdext_ctl_get_vui_params_op_t s_get_vui_params_op = {};
s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -614,8 +619,8 @@
}
status_t C2SoftHevcDec::setFlushMode() {
- ivd_ctl_flush_ip_t s_set_flush_ip;
- ivd_ctl_flush_op_t s_set_flush_op;
+ ivd_ctl_flush_ip_t s_set_flush_ip = {};
+ ivd_ctl_flush_op_t s_set_flush_op = {};
s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -633,8 +638,8 @@
}
status_t C2SoftHevcDec::resetDecoder() {
- ivd_ctl_reset_ip_t s_reset_ip;
- ivd_ctl_reset_op_t s_reset_op;
+ ivd_ctl_reset_ip_t s_reset_ip = {};
+ ivd_ctl_reset_op_t s_reset_op = {};
s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
@@ -662,8 +667,8 @@
status_t C2SoftHevcDec::deleteDecoder() {
if (mDecHandle) {
- ivdext_delete_ip_t s_delete_ip;
- ivdext_delete_op_t s_delete_op;
+ ivdext_delete_ip_t s_delete_ip = {};
+ ivdext_delete_op_t s_delete_op = {};
s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
@@ -835,9 +840,11 @@
work->result = wView.error();
return;
}
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
- if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+ ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+ if (!setDecodeArgs(ps_decode_ip, ps_decode_op, &rView, &wView,
inOffset + inPos, inSize - inPos, workIndex)) {
mSignalledError = true;
work->workletsProcessed = 1u;
@@ -852,26 +859,26 @@
WORD32 delay;
GETTIME(&mTimeStart, nullptr);
TIME_DIFF(mTimeEnd, mTimeStart, delay);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
WORD32 decodeTime;
GETTIME(&mTimeEnd, nullptr);
TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
- s_decode_op.u4_num_bytes_consumed);
- if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ ps_decode_op->u4_num_bytes_consumed);
+ if (IVD_MEM_ALLOC_FAILED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGE("allocation failure in decoder");
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
} else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED ==
- (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
- } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & IVD_ERROR_MASK)) {
+ } else if (IVD_RES_CHANGED == (ps_decode_op->u4_error_code & IVD_ERROR_MASK)) {
ALOGV("resolution changed");
drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
resetDecoder();
@@ -880,16 +887,16 @@
/* Decode header and get new dimensions */
setParams(mStride, IVD_DECODE_HEADER);
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- } else if (IS_IVD_FATAL_ERROR(s_decode_op.u4_error_code)) {
- ALOGE("Fatal error in decoder 0x%x", s_decode_op.u4_error_code);
+ (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+ } else if (IS_IVD_FATAL_ERROR(ps_decode_op->u4_error_code)) {
+ ALOGE("Fatal error in decoder 0x%x", ps_decode_op->u4_error_code);
mSignalledError = true;
work->workletsProcessed = 1u;
work->result = C2_CORRUPTED;
return;
}
- if (s_decode_op.i4_reorder_depth >= 0 && mOutputDelay != s_decode_op.i4_reorder_depth) {
- mOutputDelay = s_decode_op.i4_reorder_depth;
+ if (ps_decode_op->i4_reorder_depth >= 0 && mOutputDelay != ps_decode_op->i4_reorder_depth) {
+ mOutputDelay = ps_decode_op->i4_reorder_depth;
ALOGV("New Output delay %d ", mOutputDelay);
C2PortActualDelayTuning::output outputDelay(mOutputDelay);
@@ -907,15 +914,15 @@
return;
}
}
- if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (0 < ps_decode_op->u4_pic_wd && 0 < ps_decode_op->u4_pic_ht) {
if (mHeaderDecoded == false) {
mHeaderDecoded = true;
- setParams(ALIGN32(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ setParams(ALIGN32(ps_decode_op->u4_pic_wd), IVD_DECODE_FRAME);
}
- if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
- mWidth = s_decode_op.u4_pic_wd;
- mHeight = s_decode_op.u4_pic_ht;
- CHECK_EQ(0u, s_decode_op.u4_output_present);
+ if (ps_decode_op->u4_pic_wd != mWidth || ps_decode_op->u4_pic_ht != mHeight) {
+ mWidth = ps_decode_op->u4_pic_wd;
+ mHeight = ps_decode_op->u4_pic_ht;
+ CHECK_EQ(0u, ps_decode_op->u4_output_present);
C2StreamPictureSizeInfo::output size(0u, mWidth, mHeight);
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -935,15 +942,15 @@
}
}
(void) getVuiParams();
- hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
- if (s_decode_op.u4_output_present) {
- finishWork(s_decode_op.u4_ts, work);
+ hasPicture |= (1 == ps_decode_op->u4_frame_decoded_flag);
+ if (ps_decode_op->u4_output_present) {
+ finishWork(ps_decode_op->u4_ts, work);
}
- if (0 == s_decode_op.u4_num_bytes_consumed) {
+ if (0 == ps_decode_op->u4_num_bytes_consumed) {
ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
break;
}
- inPos += s_decode_op.u4_num_bytes_consumed;
+ inPos += ps_decode_op->u4_num_bytes_consumed;
if (hasPicture && (inSize - inPos)) {
ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
(int)inSize - (int)inPos);
@@ -985,16 +992,18 @@
ALOGE("graphic view map failed %d", wView.error());
return C2_CORRUPTED;
}
- ivd_video_decode_ip_t s_decode_ip;
- ivd_video_decode_op_t s_decode_op;
- if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ ihevcd_cxa_video_decode_ip_t s_hevcd_decode_ip = {};
+ ihevcd_cxa_video_decode_op_t s_hevcd_decode_op = {};
+ ivd_video_decode_ip_t *ps_decode_ip = &s_hevcd_decode_ip.s_ivd_video_decode_ip_t;
+ ivd_video_decode_op_t *ps_decode_op = &s_hevcd_decode_op.s_ivd_video_decode_op_t;
+ if (!setDecodeArgs(ps_decode_ip, ps_decode_op, nullptr, &wView, 0, 0, 0)) {
mSignalledError = true;
work->workletsProcessed = 1u;
return C2_CORRUPTED;
}
- (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
- if (s_decode_op.u4_output_present) {
- finishWork(s_decode_op.u4_ts, work);
+ (void) ivdec_api_function(mDecHandle, ps_decode_ip, ps_decode_op);
+ if (ps_decode_op->u4_output_present) {
+ finishWork(ps_decode_op->u4_ts, work);
} else {
fillEmptyWork(work);
break;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h
index 600d7c1..b9b0a48 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.h
+++ b/media/codec2/components/hevc/C2SoftHevcDec.h
@@ -23,8 +23,7 @@
#include <SimpleC2Component.h>
#include "ihevc_typedefs.h"
-#include "iv.h"
-#include "ivd.h"
+#include "ihevcd_cxa.h"
namespace android {
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index fe37b05..a5d6fbf 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -642,7 +642,8 @@
* \retval C2_REFUSED no permission to complete the allocation
* \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this allocator does not support 1D allocations
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+ * (unexpected)
*/
virtual c2_status_t newLinearAllocation(
uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -666,7 +667,8 @@
* \retval C2_REFUSED no permission to recreate the allocation
* \retval C2_BAD_VALUE invalid handle (caller error)
* \retval C2_OMITTED this allocator does not support 1D allocations
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+ * (unexpected)
*/
virtual c2_status_t priorLinearAllocation(
const C2Handle *handle __unused,
@@ -699,7 +701,8 @@
* \retval C2_REFUSED no permission to complete the allocation
* \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this allocator does not support 2D allocations
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during allocation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during allocation
+ * (unexpected)
*/
virtual c2_status_t newGraphicAllocation(
uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -724,7 +727,8 @@
* \retval C2_REFUSED no permission to recreate the allocation
* \retval C2_BAD_VALUE invalid handle (caller error)
* \retval C2_OMITTED this allocator does not support 2D allocations
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during recreation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during recreation
+ * (unexpected)
*/
virtual c2_status_t priorGraphicAllocation(
const C2Handle *handle __unused,
@@ -908,7 +912,8 @@
* \retval C2_REFUSED no permission to complete any required allocation
* \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this pool does not support linear blocks
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+ * (unexpected)
*/
virtual c2_status_t fetchLinearBlock(
uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -937,7 +942,8 @@
* \retval C2_REFUSED no permission to complete any required allocation
* \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
* \retval C2_OMITTED this pool does not support circular blocks
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+ * (unexpected)
*/
virtual c2_status_t fetchCircularBlock(
uint32_t capacity __unused, C2MemoryUsage usage __unused,
@@ -969,7 +975,8 @@
* \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
* error)
* \retval C2_OMITTED this pool does not support 2D blocks
- * \retval C2_CORRUPTED some unknown, unrecoverable error occured during operation (unexpected)
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+ * (unexpected)
*/
virtual c2_status_t fetchGraphicBlock(
uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
@@ -980,6 +987,90 @@
}
virtual ~C2BlockPool() = default;
+
+ /**
+ * Blocking fetch for linear block. Obtains a linear writable block of given |capacity|
+ * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+ * |fence| is set to a null-fence and C2_OK is returned.
+ *
+ * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+ * is stored into |fence| and C2_BLOCKING is returned.
+ *
+ * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+ * fence is signalled when the temporary restriction on fetch is lifted.
+ * e.g. more memory is available to fetch because some memory or prior blocks were released.
+ *
+ * \param capacity the size of requested block.
+ * \param usage the memory usage info for the requested block. Returned blocks will be
+ * optimized for this usage, but may be used with any usage. One exception:
+ * protected blocks/buffers can only be used in a protected scenario.
+ * \param block pointer to where the obtained block shall be stored on success. nullptr will
+ * be stored here on failure
+ * \param fence pointer to where the fence shall be stored on C2_BLOCKING error.
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BLOCKING the operation is blocked
+ * \retval C2_REFUSED no permission to complete any required allocation
+ * \retval C2_BAD_VALUE capacity or usage are not supported (invalid) (caller error)
+ * \retval C2_OMITTED this pool does not support linear blocks or fences.
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+ * (unexpected)
+ */
+ virtual c2_status_t fetchLinearBlock(
+ uint32_t capacity __unused, C2MemoryUsage usage __unused,
+ std::shared_ptr<C2LinearBlock> *block /* nonnull */,
+ C2Fence *fence /* nonnull */) {
+ *block = nullptr;
+ (void) fence;
+ return C2_OMITTED;
+ }
+
+ /**
+ * Blocking fetch for 2D graphic block. Obtains a 2D graphic writable block of given |capacity|
+ * and |usage|. If a block can be successfully obtained, the block is stored in |block|,
+ * |fence| is set to a null-fence and C2_OK is returned.
+ *
+ * If a block cannot be temporarily obtained, |block| is set to nullptr, a waitable fence
+ * is stored into |fence| and C2_BLOCKING is returned.
+ *
+ * Otherwise, |block| is set to nullptr and |fence| is set to a null-fence. The waitable
+ * fence is signalled when the temporary restriction on fetch is lifted.
+ * e.g. more memory is available to fetch because some memory or prior blocks were released.
+ *
+ * \param width the width of requested block (the obtained block could be slightly larger, e.g.
+ * to accommodate any system-required alignment)
+ * \param height the height of requested block (the obtained block could be slightly larger,
+ * e.g. to accommodate any system-required alignment)
+ * \param format the pixel format of requested block. This could be a vendor specific format.
+ * \param usage the memory usage info for the requested block. Returned blocks will be
+ * optimized for this usage, but may be used with any usage. One exception:
+ * protected blocks/buffers can only be used in a protected scenario.
+ * \param block pointer to where the obtained block shall be stored on success. nullptr
+ * will be stored here on failure
+ * \param fence pointer to where the fence shall be stored on C2_BLOCKING error.
+ *
+ * \retval C2_OK the operation was successful
+ * \retval C2_NO_MEMORY not enough memory to complete any required allocation
+ * \retval C2_TIMED_OUT the operation timed out
+ * \retval C2_BLOCKING the operation is blocked
+ * \retval C2_REFUSED no permission to complete any required allocation
+ * \retval C2_BAD_VALUE width, height, format or usage are not supported (invalid) (caller
+ * error)
+ * \retval C2_OMITTED this pool does not support 2D blocks
+ * \retval C2_CORRUPTED some unknown, unrecoverable error occurred during operation
+ * (unexpected)
+ */
+ virtual c2_status_t fetchGraphicBlock(
+ uint32_t width __unused, uint32_t height __unused, uint32_t format __unused,
+ C2MemoryUsage usage __unused,
+ std::shared_ptr<C2GraphicBlock> *block /* nonnull */,
+ C2Fence *fence /* nonnull */) {
+ *block = nullptr;
+ (void) fence;
+ return C2_OMITTED;
+ }
protected:
C2BlockPool() = default;
};
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 752140a..f8aa672 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -187,6 +187,8 @@
kParamIndexPictureType,
kParamIndexHdr10PlusMetadata,
+ kParamIndexPictureQuantization,
+
/* ------------------------------------ video components ------------------------------------ */
kParamIndexFrameRate = C2_PARAM_INDEX_VIDEO_PARAM_START,
@@ -1699,6 +1701,31 @@
constexpr char C2_PARAMKEY_GOP[] = "coding.gop";
/**
+ * Quantization
+ * min/max for each picture type
+ *
+ */
+struct C2PictureQuantizationStruct {
+ C2PictureQuantizationStruct() : type_((C2Config::picture_type_t)0),
+ min(INT32_MIN), max(INT32_MAX) {}
+ C2PictureQuantizationStruct(C2Config::picture_type_t type, int32_t min_, int32_t max_)
+ : type_(type), min(min_), max(max_) { }
+
+ C2Config::picture_type_t type_;
+ int32_t min; // INT32_MIN == 'no lower bound specified'
+ int32_t max; // INT32_MAX == 'no upper bound specified'
+
+ DEFINE_AND_DESCRIBE_C2STRUCT(PictureQuantization)
+ C2FIELD(type_, "type")
+ C2FIELD(min, "min")
+ C2FIELD(max, "max")
+};
+
+typedef C2StreamParam<C2Tuning, C2SimpleArrayStruct<C2PictureQuantizationStruct>,
+ kParamIndexPictureQuantization> C2StreamPictureQuantizationTuning;
+constexpr char C2_PARAMKEY_PICTURE_QUANTIZATION[] = "coding.qp";
+
+/**
* Sync frame can be requested on demand by the client.
*
* If true, the next I frame shall be encoded as a sync frame. This config can be passed
diff --git a/media/codec2/hidl/1.0/vts/.clang-format b/media/codec2/hidl/1.0/vts/.clang-format
new file mode 120000
index 0000000..136279c
--- /dev/null
+++ b/media/codec2/hidl/1.0/vts/.clang-format
@@ -0,0 +1 @@
+../../../../../../../build/soong/scripts/system-clang-format
\ No newline at end of file
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 3a47ae9..efc5813 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -33,14 +33,40 @@
using android::C2AllocatorIon;
#include "media_c2_hidl_test_common.h"
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kDecodeTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+struct CompToURL {
+ std::string mime;
+ std::string mURL;
+ std::string info;
+};
-// Resource directory
-static std::string sResourceDir = "";
+std::vector<CompToURL> kCompToURL = {
+ {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
+ {"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
+ "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
+ {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+ {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+ "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
+ {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
+ {"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+ "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"},
+ {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.info"},
+ {"amr-wb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+ "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"},
+ {"vorbis", "bbb_vorbis_stereo_128kbps_48000hz.vorbis",
+ "bbb_vorbis_stereo_128kbps_48000hz.info"},
+ {"opus", "bbb_opus_stereo_128kbps_48000hz.opus", "bbb_opus_stereo_128kbps_48000hz.info"},
+ {"g711-alaw", "bbb_g711alaw_1ch_8khz.raw", "bbb_g711alaw_1ch_8khz.info"},
+ {"g711-mlaw", "bbb_g711mulaw_1ch_8khz.raw", "bbb_g711mulaw_1ch_8khz.info"},
+ {"gsm", "bbb_gsm_1ch_8khz_13kbps.raw", "bbb_gsm_1ch_8khz_13kbps.info"},
+ {"raw", "bbb_raw_1ch_8khz_s32le.raw", "bbb_raw_1ch_8khz_s32le.info"},
+ {"flac", "bbb_flac_stereo_680kbps_48000hz.flac", "bbb_flac_stereo_680kbps_48000hz.info"},
+};
class LinearBuffer : public C2Buffer {
public:
@@ -76,33 +102,17 @@
mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
ASSERT_NE(mLinearPool, nullptr);
- mCompName = unknown_comp;
- struct StringToName {
- const char* Name;
- standardComp CompName;
- };
- const StringToName kStringToName[] = {
- {"xaac", xaac}, {"mp3", mp3}, {"amrnb", amrnb},
- {"amrwb", amrwb}, {"aac", aac}, {"vorbis", vorbis},
- {"opus", opus}, {"pcm", pcm}, {"g711.alaw", g711alaw},
- {"g711.mlaw", g711mlaw}, {"gsm", gsm}, {"raw", raw},
- {"flac", flac},
- };
- const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+ std::vector<std::unique_ptr<C2Param>> queried;
+ mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+ ASSERT_GT(queried.size(), 0);
- // Find the component type
- for (size_t i = 0; i < kNumStringToName; ++i) {
- if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
- mCompName = kStringToName[i].CompName;
- break;
- }
- }
+ mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
+
mEos = false;
mFramesReceived = 0;
mTimestampUs = 0u;
mWorkResult = C2_OK;
mTimestampDevTest = false;
- if (mCompName == unknown_comp) mDisableTest = true;
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -119,6 +129,8 @@
virtual void validateTimestampList(int32_t* bitStreamInfo);
+ void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
struct outputMetaData {
uint64_t timestampUs;
uint32_t rangeLength;
@@ -158,29 +170,12 @@
}
}
- enum standardComp {
- xaac,
- mp3,
- amrnb,
- amrwb,
- aac,
- vorbis,
- opus,
- pcm,
- g711alaw,
- g711mlaw,
- gsm,
- raw,
- flac,
- unknown_comp,
- };
-
+ std::string mMime;
std::string mInstanceName;
std::string mComponentName;
bool mEos;
bool mDisableTest;
bool mTimestampDevTest;
- standardComp mCompName;
int32_t mWorkResult;
uint64_t mTimestampUs;
@@ -207,9 +202,8 @@
}
};
-class Codec2AudioDecHidlTest
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioDecHidlTest : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -217,7 +211,7 @@
};
void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
- Codec2AudioDecHidlTest::standardComp compName, bool& disableTest) {
+ bool& disableTest) {
// Validate its a C2 Component
if (component->getName().find("c2") == std::string::npos) {
ALOGE("Not a c2 component");
@@ -244,13 +238,6 @@
return;
}
}
-
- // Validates component name
- if (compName == Codec2AudioDecHidlTest::unknown_comp) {
- ALOGE("Component InValid");
- disableTest = true;
- return;
- }
ALOGV("Component Valid");
}
@@ -271,7 +258,7 @@
// parsing the header of elementary stream. Client needs to collect this
// information and reconfigure
void getInputChannelInfo(const std::shared_ptr<android::Codec2Client::Component>& component,
- Codec2AudioDecHidlTest::standardComp compName, int32_t* bitStreamInfo) {
+ std::string mime, int32_t* bitStreamInfo) {
// query nSampleRate and nChannels
std::initializer_list<C2Param::Index> indices{
C2StreamSampleRateInfo::output::PARAM_TYPE,
@@ -288,89 +275,29 @@
C2Param* param = inParams[i].get();
bitStreamInfo[i] = *(int32_t*)((uint8_t*)param + offset);
}
- switch (compName) {
- case Codec2AudioDecHidlTest::amrnb: {
- ASSERT_EQ(bitStreamInfo[0], 8000);
- ASSERT_EQ(bitStreamInfo[1], 1);
- break;
- }
- case Codec2AudioDecHidlTest::amrwb: {
- ASSERT_EQ(bitStreamInfo[0], 16000);
- ASSERT_EQ(bitStreamInfo[1], 1);
- break;
- }
- case Codec2AudioDecHidlTest::gsm: {
- ASSERT_EQ(bitStreamInfo[0], 8000);
- break;
- }
- default:
- break;
+ if (mime.find("3gpp") != std::string::npos) {
+ ASSERT_EQ(bitStreamInfo[0], 8000);
+ ASSERT_EQ(bitStreamInfo[1], 1);
+ } else if (mime.find("amr-wb") != std::string::npos) {
+ ASSERT_EQ(bitStreamInfo[0], 16000);
+ ASSERT_EQ(bitStreamInfo[1], 1);
+ } else if (mime.find("gsm") != std::string::npos) {
+ ASSERT_EQ(bitStreamInfo[0], 8000);
}
}
}
-// number of elementary streams per component
-#define STREAM_COUNT 2
-
// LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioDecHidlTest::standardComp comp, char* mURL, char* info,
- size_t streamIndex = 0) {
- struct CompToURL {
- Codec2AudioDecHidlTest::standardComp comp;
- const char mURL[STREAM_COUNT][512];
- const char info[STREAM_COUNT][512];
- };
- ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
- static const CompToURL kCompToURL[] = {
- {Codec2AudioDecHidlTest::standardComp::xaac,
- {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
- {"bbb_aac_stereo_128kbps_48000hz.info",
- "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
- {Codec2AudioDecHidlTest::standardComp::mp3,
- {"bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.mp3"},
- {"bbb_mp3_stereo_192kbps_48000hz.info",
- "bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"}},
- {Codec2AudioDecHidlTest::standardComp::aac,
- {"bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.aac"},
- {"bbb_aac_stereo_128kbps_48000hz.info",
- "bbb_aac_stereo_128kbps_48000hz_multi_frame.info"}},
- {Codec2AudioDecHidlTest::standardComp::amrnb,
- {"sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.amrnb"},
- {"sine_amrnb_1ch_12kbps_8000hz.info",
- "sine_amrnb_1ch_12kbps_8000hz_multi_frame.info"}},
- {Codec2AudioDecHidlTest::standardComp::amrwb,
- {"bbb_amrwb_1ch_14kbps_16000hz.amrwb", "bbb_amrwb_1ch_14kbps_16000hz.amrwb"},
- {"bbb_amrwb_1ch_14kbps_16000hz.info",
- "bbb_amrwb_1ch_14kbps_16000hz_multi_frame.info"}},
- {Codec2AudioDecHidlTest::standardComp::vorbis,
- {"bbb_vorbis_stereo_128kbps_48000hz.vorbis", ""},
- {"bbb_vorbis_stereo_128kbps_48000hz.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::opus,
- {"bbb_opus_stereo_128kbps_48000hz.opus", ""},
- {"bbb_opus_stereo_128kbps_48000hz.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::g711alaw,
- {"bbb_g711alaw_1ch_8khz.raw", ""},
- {"bbb_g711alaw_1ch_8khz.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::g711mlaw,
- {"bbb_g711mulaw_1ch_8khz.raw", ""},
- {"bbb_g711mulaw_1ch_8khz.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::gsm,
- {"bbb_gsm_1ch_8khz_13kbps.raw", ""},
- {"bbb_gsm_1ch_8khz_13kbps.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::raw,
- {"bbb_raw_1ch_8khz_s32le.raw", ""},
- {"bbb_raw_1ch_8khz_s32le.info", ""}},
- {Codec2AudioDecHidlTest::standardComp::flac,
- {"bbb_flac_stereo_680kbps_48000hz.flac", ""},
- {"bbb_flac_stereo_680kbps_48000hz.info", ""}},
- };
-
- for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
- if (kCompToURL[i].comp == comp) {
- strcat(mURL, kCompToURL[i].mURL[streamIndex]);
- strcat(info, kCompToURL[i].info[streamIndex]);
- return;
+void Codec2AudioDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
+ int streamCount = 0;
+ for (size_t i = 0; i < kCompToURL.size(); ++i) {
+ if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+ if (streamCount == streamIndex) {
+ strcat(mURL, kCompToURL[i].mURL.c_str());
+ strcat(info, kCompToURL[i].info.c_str());
+ return;
+ }
+ streamCount++;
}
}
}
@@ -461,7 +388,7 @@
void Codec2AudioDecHidlTestBase::validateTimestampList(int32_t* bitStreamInfo) {
uint32_t samplesReceived = 0;
// Update SampleRate and ChannelCount
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
int32_t nSampleRate = bitStreamInfo[0];
int32_t nChannels = bitStreamInfo[1];
std::list<uint64_t>::iterator itIn = mTimestampUslist.begin();
@@ -486,7 +413,7 @@
TEST_P(Codec2AudioDecHidlTest, validateCompName) {
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ALOGV("Checks if the given component is a valid audio component");
- validateComponent(mComponent, mCompName, mDisableTest);
+ validateComponent(mComponent, mDisableTest);
ASSERT_EQ(mDisableTest, false);
}
@@ -495,15 +422,13 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ASSERT_EQ(mComponent->start(), C2_OK);
int32_t bitStreamInfo[2] = {0};
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
setupConfigParam(mComponent, bitStreamInfo);
ASSERT_EQ(mComponent->stop(), C2_OK);
}
-class Codec2AudioDecDecodeTest
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioDecDecodeTest : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<DecodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -514,16 +439,15 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
- ;
- bool signalEOS = !std::get<3>(GetParam()).compare("true");
+ uint32_t streamIndex = std::get<2>(GetParam());
+ bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
char mURL[512], info[512];
android::Vector<FrameInfo> Info;
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info, streamIndex);
+ GetURLForComponent(mURL, info, streamIndex);
if (!strcmp(mURL, sResourceDir.c_str())) {
ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
return;
@@ -536,11 +460,11 @@
mFramesReceived = 0;
mTimestampUs = 0;
int32_t bitStreamInfo[2] = {0};
- if (mCompName == raw) {
+ if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
} else {
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
if (!setupConfigParam(mComponent, bitStreamInfo)) {
std::cout << "[ WARN ] Test Skipped \n";
@@ -591,17 +515,17 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
int32_t bitStreamInfo[2] = {0};
- if (mCompName == raw) {
+ if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
} else {
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
if (!setupConfigParam(mComponent, bitStreamInfo)) {
std::cout << "[ WARN ] Test Skipped \n";
@@ -683,17 +607,17 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
int32_t bitStreamInfo[2] = {0};
- if (mCompName == raw) {
+ if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
} else {
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
if (!setupConfigParam(mComponent, bitStreamInfo)) {
std::cout << "[ WARN ] Test Skipped \n";
@@ -768,7 +692,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -798,11 +722,11 @@
}
eleInfo.close();
int32_t bitStreamInfo[2] = {0};
- if (mCompName == raw) {
+ if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
} else {
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
if (!setupConfigParam(mComponent, bitStreamInfo)) {
std::cout << "[ WARN ] Test Skipped \n";
@@ -833,9 +757,8 @@
ASSERT_EQ(mComponent->stop(), C2_OK);
}
-class Codec2AudioDecCsdInputTests
- : public Codec2AudioDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2AudioDecCsdInputTests : public Codec2AudioDecHidlTestBase,
+ public ::testing::WithParamInterface<CsdFlushTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -853,7 +776,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
if (!strcmp(mURL, sResourceDir.c_str())) {
ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
return;
@@ -864,11 +787,11 @@
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
int32_t bitStreamInfo[2] = {0};
- if (mCompName == raw) {
+ if (mMime.find("raw") != std::string::npos) {
bitStreamInfo[0] = 8000;
bitStreamInfo[1] = 1;
} else {
- ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mCompName, bitStreamInfo));
+ ASSERT_NO_FATAL_FAILURE(getInputChannelInfo(mComponent, mMime, bitStreamInfo));
}
if (!setupConfigParam(mComponent, bitStreamInfo)) {
std::cout << "[ WARN ] Test Skipped \n";
@@ -881,7 +804,7 @@
ASSERT_EQ(eleStream.is_open(), true);
bool signalEOS = false;
- bool flushCsd = !std::get<2>(GetParam()).compare("true");
+ bool flushCsd = std::get<2>(GetParam());
ALOGV("sending %d csd data ", numCsds);
int framesToDecode = numCsds;
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
@@ -937,44 +860,36 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioDecHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// DecodeTest with StreamIndex and EOS / No EOS
INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2AudioDecDecodeTest,
testing::ValuesIn(kDecodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2AudioDecCsdInputTests,
testing::ValuesIn(kCsdFlushTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
int main(int argc, char** argv) {
+ parseArgs(argc, argv);
kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_DECODER);
for (auto params : kTestParameters) {
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
- }
-
- // Set the resource directory based on command line args.
- // Test will fail to set up if the argument is not set.
- for (int i = 1; i < argc; i++) {
- if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
- sResourceDir = argv[i + 1];
- break;
- }
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index e3a4f68..562c77f 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -35,11 +35,9 @@
#include "media_c2_hidl_test_common.h"
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kEncodeTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, int32_t>;
-// Resource directory
-static std::string sResourceDir = "";
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
class LinearBuffer : public C2Buffer {
public:
@@ -75,30 +73,17 @@
mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
ASSERT_NE(mLinearPool, nullptr);
- mCompName = unknown_comp;
- struct StringToName {
- const char* Name;
- standardComp CompName;
- };
- const StringToName kStringToName[] = {
- {"aac", aac}, {"flac", flac}, {"opus", opus}, {"amrnb", amrnb}, {"amrwb", amrwb},
- };
- const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
+ std::vector<std::unique_ptr<C2Param>> queried;
+ mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+ &queried);
+ ASSERT_GT(queried.size(), 0);
- // Find the component type
- for (size_t i = 0; i < kNumStringToName; ++i) {
- if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
- mCompName = kStringToName[i].CompName;
- break;
- }
- }
+ mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
mEos = false;
mCsd = false;
mFramesReceived = 0;
mWorkResult = C2_OK;
mOutputSize = 0u;
- if (mCompName == unknown_comp) mDisableTest = true;
- if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
getInputMaxBufSize();
}
@@ -113,6 +98,8 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
+ void GetURLForComponent(char* mURL);
+
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
for (std::unique_ptr<C2Work>& work : workItems) {
@@ -133,21 +120,13 @@
}
}
}
- enum standardComp {
- aac,
- flac,
- opus,
- amrnb,
- amrwb,
- unknown_comp,
- };
+ std::string mMime;
std::string mInstanceName;
std::string mComponentName;
bool mEos;
bool mCsd;
bool mDisableTest;
- standardComp mCompName;
int32_t mWorkResult;
uint32_t mFramesReceived;
@@ -192,9 +171,8 @@
}
};
-class Codec2AudioEncHidlTest
- : public Codec2AudioEncHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2AudioEncHidlTest : public Codec2AudioEncHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -202,7 +180,7 @@
};
void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
- Codec2AudioEncHidlTest::standardComp compName, bool& disableTest) {
+ bool& disableTest) {
// Validate its a C2 Component
if (component->getName().find("c2") == std::string::npos) {
ALOGE("Not a c2 component");
@@ -229,13 +207,6 @@
return;
}
}
-
- // Validates component name
- if (compName == Codec2AudioEncHidlTest::unknown_comp) {
- ALOGE("Component InValid");
- disableTest = true;
- return;
- }
ALOGV("Component Valid");
}
@@ -253,56 +224,48 @@
}
// Get config params for a component
-bool getConfigParams(Codec2AudioEncHidlTest::standardComp compName, int32_t* nChannels,
- int32_t* nSampleRate, int32_t* samplesPerFrame) {
- switch (compName) {
- case Codec2AudioEncHidlTest::aac:
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 1024;
- break;
- case Codec2AudioEncHidlTest::flac:
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 1152;
- break;
- case Codec2AudioEncHidlTest::opus:
- *nChannels = 2;
- *nSampleRate = 48000;
- *samplesPerFrame = 960;
- break;
- case Codec2AudioEncHidlTest::amrnb:
- *nChannels = 1;
- *nSampleRate = 8000;
- *samplesPerFrame = 160;
- break;
- case Codec2AudioEncHidlTest::amrwb:
- *nChannels = 1;
- *nSampleRate = 16000;
- *samplesPerFrame = 160;
- break;
- default:
- return false;
- }
+bool getConfigParams(std::string mime, int32_t* nChannels, int32_t* nSampleRate,
+ int32_t* samplesPerFrame) {
+ if (mime.find("mp4a-latm") != std::string::npos) {
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 1024;
+ } else if (mime.find("flac") != std::string::npos) {
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 1152;
+ } else if (mime.find("opus") != std::string::npos) {
+ *nChannels = 2;
+ *nSampleRate = 48000;
+ *samplesPerFrame = 960;
+ } else if (mime.find("3gpp") != std::string::npos) {
+ *nChannels = 1;
+ *nSampleRate = 8000;
+ *samplesPerFrame = 160;
+ } else if (mime.find("amr-wb") != std::string::npos) {
+ *nChannels = 1;
+ *nSampleRate = 16000;
+ *samplesPerFrame = 160;
+ } else
+ return false;
+
return true;
}
// LookUpTable of clips and metadata for component testing
-void GetURLForComponent(Codec2AudioEncHidlTest::standardComp comp, char* mURL) {
+void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL) {
struct CompToURL {
- Codec2AudioEncHidlTest::standardComp comp;
+ std::string mime;
const char* mURL;
};
static const CompToURL kCompToURL[] = {
- {Codec2AudioEncHidlTest::standardComp::aac, "bbb_raw_2ch_48khz_s16le.raw"},
- {Codec2AudioEncHidlTest::standardComp::amrnb, "bbb_raw_1ch_8khz_s16le.raw"},
- {Codec2AudioEncHidlTest::standardComp::amrwb, "bbb_raw_1ch_16khz_s16le.raw"},
- {Codec2AudioEncHidlTest::standardComp::flac, "bbb_raw_2ch_48khz_s16le.raw"},
- {Codec2AudioEncHidlTest::standardComp::opus, "bbb_raw_2ch_48khz_s16le.raw"},
+ {"mp4a-latm", "bbb_raw_2ch_48khz_s16le.raw"}, {"3gpp", "bbb_raw_1ch_8khz_s16le.raw"},
+ {"amr-wb", "bbb_raw_1ch_16khz_s16le.raw"}, {"flac", "bbb_raw_2ch_48khz_s16le.raw"},
+ {"opus", "bbb_raw_2ch_48khz_s16le.raw"},
};
for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
- if (kCompToURL[i].comp == comp) {
+ if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
strcat(mURL, kCompToURL[i].mURL);
return;
}
@@ -395,14 +358,12 @@
TEST_P(Codec2AudioEncHidlTest, validateCompName) {
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ALOGV("Checks if the given component is a valid audio component");
- validateComponent(mComponent, mCompName, mDisableTest);
+ validateComponent(mComponent, mDisableTest);
ASSERT_EQ(mDisableTest, false);
}
-class Codec2AudioEncEncodeTest
- : public Codec2AudioEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2AudioEncEncodeTest : public Codec2AudioEncHidlTestBase,
+ public ::testing::WithParamInterface<EncodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -414,17 +375,17 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL);
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ GetURLForComponent(mURL);
+ bool signalEOS = std::get<2>(GetParam());
// Ratio w.r.t to mInputMaxBufSize
- int32_t inputMaxBufRatio = std::stoi(std::get<3>(GetParam()));
+ int32_t inputMaxBufRatio = std::get<3>(GetParam());
int32_t nChannels;
int32_t nSampleRate;
int32_t samplesPerFrame;
- if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -464,11 +425,9 @@
ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
ASSERT_TRUE(false);
}
- if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
- if (!mCsd) {
- ALOGE("CSD buffer missing");
- ASSERT_TRUE(false);
- }
+ if ((mMime.find("flac") != std::string::npos) || (mMime.find("opus") != std::string::npos) ||
+ (mMime.find("mp4a-latm") != std::string::npos)) {
+ ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
ASSERT_EQ(mEos, true);
ASSERT_EQ(mComponent->stop(), C2_OK);
@@ -522,15 +481,15 @@
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL);
+ GetURLForComponent(mURL);
mFlushedIndices.clear();
int32_t nChannels;
int32_t nSampleRate;
int32_t samplesPerFrame;
- if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -587,7 +546,7 @@
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL);
+ GetURLForComponent(mURL);
std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
@@ -600,8 +559,8 @@
int32_t numFrames = 16;
int32_t maxChannelCount = 8;
- if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -611,7 +570,7 @@
// Looping through the maximum number of channel count supported by encoder
for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
- ALOGV("Configuring %u encoder for channel count = %d", mCompName, nChannels);
+ ALOGV("Configuring encoder %s for channel count = %d", mComponentName.c_str(), nChannels);
if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
@@ -668,7 +627,9 @@
ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
ASSERT_TRUE(false);
}
- if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+ if ((mMime.find("flac") != std::string::npos) ||
+ (mMime.find("opus") != std::string::npos) ||
+ (mMime.find("mp4a-latm") != std::string::npos)) {
ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
ASSERT_TRUE(mEos);
@@ -687,7 +648,7 @@
char mURL[512];
strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL);
+ GetURLForComponent(mURL);
std::ifstream eleStream;
eleStream.open(mURL, std::ifstream::binary);
@@ -699,8 +660,8 @@
int32_t nChannels;
int32_t numFrames = 16;
- if (!getConfigParams(mCompName, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mCompName << " component\n";
+ if (!getConfigParams(mMime, &nChannels, &nSampleRate, &samplesPerFrame)) {
+ std::cout << "Failed to get the config params for " << mComponentName << "\n";
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -711,7 +672,7 @@
uint32_t prevSampleRate = 0u;
for (int32_t nSampleRate : sampleRateValues) {
- ALOGV("Configuring %u encoder for SampleRate = %d", mCompName, nSampleRate);
+ ALOGV("Configuring encoder %s for SampleRate = %d", mComponentName.c_str(), nSampleRate);
if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
@@ -772,7 +733,9 @@
ALOGE("framesReceived : %d inputFrames : %u", mFramesReceived, numFrames);
ASSERT_TRUE(false);
}
- if ((mCompName == flac || mCompName == opus || mCompName == aac)) {
+ if ((mMime.find("flac") != std::string::npos) ||
+ (mMime.find("opus") != std::string::npos) ||
+ (mMime.find("mp4a-latm") != std::string::npos)) {
ASSERT_TRUE(mCsd) << "CSD buffer missing";
}
ASSERT_TRUE(mEos);
@@ -786,36 +749,28 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2AudioEncHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// EncodeTest with EOS / No EOS and inputMaxBufRatio
// inputMaxBufRatio is ratio w.r.t. to mInputMaxBufSize
INSTANTIATE_TEST_SUITE_P(EncodeTest, Codec2AudioEncEncodeTest,
testing::ValuesIn(kEncodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
int main(int argc, char** argv) {
+ parseArgs(argc, argv);
kTestParameters = getTestParameters(C2Component::DOMAIN_AUDIO, C2Component::KIND_ENCODER);
for (auto params : kTestParameters) {
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "1"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false, 1));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false", "2"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false, 2));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "1"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true, 1));
kEncodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true", "2"));
- }
-
- // Set the resource directory based on command line args.
- // Test will fail to set up if the argument is not set.
- for (int i = 1; i < argc; i++) {
- if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
- sResourceDir = argv[i + 1];
- break;
- }
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true, 2));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
index 0251ec2..1f1681d 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.cpp
@@ -22,6 +22,48 @@
#include <android/hardware/media/c2/1.0/IComponentStore.h>
+std::string sResourceDir = "";
+
+std::string sComponentNamePrefix = "";
+
+static constexpr struct option kArgOptions[] = {
+ {"res", required_argument, 0, 'P'},
+ {"prefix", required_argument, 0, 'p'},
+ {"help", required_argument, 0, 'h'},
+ {nullptr, 0, nullptr, 0},
+};
+
+void printUsage(char* me) {
+ std::cerr << "VTS tests to test codec2 components \n";
+ std::cerr << "Usage: " << me << " [options] \n";
+ std::cerr << "\t -P, --res: Mandatory path to a folder that contains test resources \n";
+ std::cerr << "\t -p, --prefix: Optional prefix to select component/s to be tested \n";
+ std::cerr << "\t All codecs are tested by default \n";
+ std::cerr << "\t Eg: c2.android - test codecs starting with c2.android \n";
+ std::cerr << "\t Eg: c2.android.aac.decoder - test a specific codec \n";
+ std::cerr << "\t -h, --help: Print usage \n";
+}
+
+void parseArgs(int argc, char** argv) {
+ int arg;
+ int option_index;
+ while ((arg = getopt_long(argc, argv, ":P:p:h", kArgOptions, &option_index)) != -1) {
+ switch (arg) {
+ case 'P':
+ sResourceDir = optarg;
+ break;
+ case 'p':
+ sComponentNamePrefix = optarg;
+ break;
+ case 'h':
+ printUsage(argv[0]);
+ break;
+ default:
+ break;
+ }
+ }
+}
+
// Test the codecs for NullBuffer, Empty Input Buffer with(out) flags set
void testInputBuffer(const std::shared_ptr<android::Codec2Client::Component>& component,
std::mutex& queueLock, std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -92,8 +134,7 @@
for (size_t i = 0; i < updates.size(); ++i) {
C2Param* param = updates[i].get();
if (param->index() == C2StreamInitDataInfo::output::PARAM_TYPE) {
- C2StreamInitDataInfo::output* csdBuffer =
- (C2StreamInitDataInfo::output*)(param);
+ C2StreamInitDataInfo::output* csdBuffer = (C2StreamInitDataInfo::output*)(param);
size_t csdSize = csdBuffer->flexCount();
if (csdSize > 0) csd = true;
} else if ((param->index() == C2StreamSampleRateInfo::output::PARAM_TYPE) ||
@@ -118,8 +159,7 @@
typedef std::unique_lock<std::mutex> ULock;
ULock l(queueLock);
workQueue.push_back(std::move(work));
- if (!flushedIndices.empty() &&
- (frameIndexIt != flushedIndices.end())) {
+ if (!flushedIndices.empty() && (frameIndexIt != flushedIndices.end())) {
flushedIndices.erase(frameIndexIt);
}
queueCondition.notify_all();
@@ -136,15 +176,15 @@
}
// Return all test parameters, a list of tuple of <instance, component>
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters() {
+const std::vector<TestParameters>& getTestParameters() {
return getTestParameters(C2Component::DOMAIN_OTHER, C2Component::KIND_OTHER);
}
// Return all test parameters, a list of tuple of <instance, component> with matching domain and
// kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
- C2Component::domain_t domain, C2Component::kind_t kind) {
- static std::vector<std::tuple<std::string, std::string>> parameters;
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+ C2Component::kind_t kind) {
+ static std::vector<TestParameters> parameters;
auto instances = android::Codec2Client::GetServiceNames();
for (std::string instance : instances) {
@@ -157,11 +197,18 @@
(traits.domain != domain || traits.kind != kind)) {
continue;
}
-
+ if (traits.name.rfind(sComponentNamePrefix, 0) != 0) {
+ ALOGD("Skipping tests for %s. Prefix specified is %s", traits.name.c_str(),
+ sComponentNamePrefix.c_str());
+ continue;
+ }
parameters.push_back(std::make_tuple(instance, traits.name));
}
}
+ if (parameters.empty()) {
+ ALOGE("No test parameters added. Verify component prefix passed to the test");
+ }
return parameters;
}
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index 50e3ac5..e74f247 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -40,7 +40,14 @@
using namespace ::std::chrono;
-static std::vector<std::tuple<std::string, std::string>> kTestParameters;
+using TestParameters = std::tuple<std::string, std::string>;
+static std::vector<TestParameters> kTestParameters;
+
+// Resource directory
+extern std::string sResourceDir;
+
+// Component name prefix
+extern std::string sComponentNamePrefix;
struct FrameInfo {
int bytesCount;
@@ -48,6 +55,18 @@
int64_t timestamp;
};
+template <typename... T>
+static inline std::string PrintInstanceTupleNameToString(
+ const testing::TestParamInfo<std::tuple<T...>>& info) {
+ std::stringstream ss;
+ std::apply([&ss](auto&&... elems) { ((ss << elems << '_'), ...); }, info.param);
+ ss << info.index;
+ std::string param_string = ss.str();
+ auto isNotAlphaNum = [](char c) { return !std::isalnum(c); };
+ std::replace_if(param_string.begin(), param_string.end(), isNotAlphaNum, '_');
+ return param_string;
+}
+
/*
* Handle Callback functions onWorkDone(), onTripped(),
* onError(), onDeath(), onFramesRendered()
@@ -105,13 +124,15 @@
std::function<void(std::list<std::unique_ptr<C2Work>>& workItems)> callBack;
};
+void parseArgs(int argc, char** argv);
+
// Return all test parameters, a list of tuple of <instance, component>.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters();
+const std::vector<TestParameters>& getTestParameters();
// Return all test parameters, a list of tuple of <instance, component> with matching domain and
// kind.
-const std::vector<std::tuple<std::string, std::string>>& getTestParameters(
- C2Component::domain_t domain, C2Component::kind_t kind);
+const std::vector<TestParameters>& getTestParameters(C2Component::domain_t domain,
+ C2Component::kind_t kind);
/*
* common functions declarations
diff --git a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index 6122225..29acd33 100644
--- a/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -53,9 +53,8 @@
}
namespace {
-
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kInputTestParameters;
+using InputTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<InputTestParameters> kInputTestParameters;
// google.codec2 Component test setup
class Codec2ComponentHidlTestBase : public ::testing::Test {
@@ -120,9 +119,8 @@
}
};
-class Codec2ComponentHidlTest
- : public Codec2ComponentHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2ComponentHidlTest : public Codec2ComponentHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -317,10 +315,8 @@
ASSERT_EQ(err, C2_OK);
}
-class Codec2ComponentInputTests
- : public Codec2ComponentHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2ComponentInputTests : public Codec2ComponentHidlTestBase,
+ public ::testing::WithParamInterface<InputTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -330,8 +326,8 @@
TEST_P(Codec2ComponentInputTests, InputBufferTest) {
description("Tests for different inputs");
- uint32_t flags = std::stoul(std::get<2>(GetParam()));
- bool isNullBuffer = !std::get<3>(GetParam()).compare("true");
+ uint32_t flags = std::get<2>(GetParam());
+ bool isNullBuffer = std::get<3>(GetParam());
if (isNullBuffer)
ALOGD("Testing for null input buffer with flag : %u", flags);
else
@@ -350,31 +346,28 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2ComponentHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_CASE_P(NonStdInputs, Codec2ComponentInputTests,
- testing::ValuesIn(kInputTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ testing::ValuesIn(kInputTestParameters), PrintInstanceTupleNameToString<>);
} // anonymous namespace
// TODO: Add test for Invalid work,
// TODO: Add test for Invalid states
int main(int argc, char** argv) {
+ parseArgs(argc, argv);
kTestParameters = getTestParameters();
for (auto params : kTestParameters) {
kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_END_OF_STREAM, true));
kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_END_OF_STREAM), "true"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_CODEC_CONFIG), "false"));
- kInputTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params),
- std::to_string(C2FrameData::FLAG_END_OF_STREAM), "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_CODEC_CONFIG, false));
+ kInputTestParameters.push_back(std::make_tuple(std::get<0>(params), std::get<1>(params),
+ C2FrameData::FLAG_END_OF_STREAM, false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index b520c17..d0a1c31 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -40,13 +40,44 @@
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kDecodeTestParameters;
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+static std::vector<DecodeTestParameters> kDecodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string>> kCsdFlushTestParameters;
+using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
+static std::vector<CsdFlushTestParameters> kCsdFlushTestParameters;
-// Resource directory
-static std::string sResourceDir = "";
+struct CompToURL {
+ std::string mime;
+ std::string mURL;
+ std::string info;
+ std::string chksum;
+};
+std::vector<CompToURL> kCompToURL = {
+ {"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
+ "bbb_avc_176x144_300kbps_60fps_chksum.md5"},
+ {"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
+ "bbb_avc_640x360_768kbps_30fps_chksum.md5"},
+ {"hevc", "bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_176x144_176kbps_60fps.info",
+ "bbb_hevc_176x144_176kbps_60fps_chksum.md5"},
+ {"hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.info",
+ "bbb_hevc_640x360_1600kbps_30fps_chksum.md5"},
+ {"mpeg2", "bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_176x144_105kbps_25fps.info",
+ ""},
+ {"mpeg2", "bbb_mpeg2_352x288_1mbps_60fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
+ {"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
+ {"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
+ ""},
+ {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
+ {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+ "bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
+ {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
+ {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
+ "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+ {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+ "bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
+ {"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
+ {"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
+};
class LinearBuffer : public C2Buffer {
public:
@@ -85,26 +116,11 @@
mLinearPool = std::make_shared<C2PooledBlockPool>(mLinearAllocator, mBlockPoolId++);
ASSERT_NE(mLinearPool, nullptr);
- mCompName = unknown_comp;
- struct StringToName {
- const char* Name;
- standardComp CompName;
- };
+ std::vector<std::unique_ptr<C2Param>> queried;
+ mComponent->query({}, {C2PortMediaTypeSetting::input::PARAM_TYPE}, C2_DONT_BLOCK, &queried);
+ ASSERT_GT(queried.size(), 0);
- const StringToName kStringToName[] = {
- {"h263", h263}, {"avc", avc}, {"mpeg2", mpeg2}, {"mpeg4", mpeg4},
- {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9}, {"av1", av1},
- };
-
- const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
- // Find the component type
- for (size_t i = 0; i < kNumStringToName; ++i) {
- if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
- mCompName = kStringToName[i].CompName;
- break;
- }
- }
+ mMime = ((C2PortMediaTypeSetting::input*)queried[0].get())->m.value;
mEos = false;
mFramesReceived = 0;
mTimestampUs = 0u;
@@ -114,11 +130,11 @@
mMd5Offset = 0;
mMd5Enable = false;
mRefMd5 = nullptr;
- if (mCompName == unknown_comp) mDisableTest = true;
C2SecureModeTuning secureModeTuning{};
mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
- if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+ if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+ secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
mDisableTest = true;
}
@@ -136,6 +152,9 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
+ void GetURLChksmForComponent(char* mURL, char* info, char* chksum, size_t streamIndex);
+ void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+
/* Calculate the CKSUM for the data in inbuf */
void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
uint32_t u4_height, uint8_t* pu1_cksum_p) {
@@ -220,8 +239,7 @@
if (!codecConfig && !work->worklets.front()->output.buffers.empty()) {
if (mReorderDepth < 0) {
C2PortReorderBufferDepthTuning::output reorderBufferDepth;
- mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK,
- nullptr);
+ mComponent->query({&reorderBufferDepth}, {}, C2_MAY_BLOCK, nullptr);
mReorderDepth = reorderBufferDepth.value;
if (mReorderDepth > 0) {
// TODO: Add validation for reordered output
@@ -267,18 +285,7 @@
}
}
- enum standardComp {
- h263,
- avc,
- mpeg2,
- mpeg4,
- hevc,
- vp8,
- vp9,
- av1,
- unknown_comp,
- };
-
+ std::string mMime;
std::string mInstanceName;
std::string mComponentName;
@@ -291,7 +298,6 @@
char* mRefMd5;
std::list<uint64_t> mTimestampUslist;
std::list<uint64_t> mFlushedIndices;
- standardComp mCompName;
int32_t mWorkResult;
int32_t mReorderDepth;
@@ -314,9 +320,8 @@
}
};
-class Codec2VideoDecHidlTest
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoDecHidlTest : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -324,7 +329,7 @@
};
void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
- Codec2VideoDecHidlTest::standardComp compName, bool& disableTest) {
+ bool& disableTest) {
// Validate its a C2 Component
if (component->getName().find("c2") == std::string::npos) {
ALOGE("Not a c2 component");
@@ -351,83 +356,32 @@
return;
}
}
-
- // Validates component name
- if (compName == Codec2VideoDecHidlTest::unknown_comp) {
- ALOGE("Component InValid");
- disableTest = true;
- return;
- }
ALOGV("Component Valid");
}
// number of elementary streams per component
#define STREAM_COUNT 3
// LookUpTable of clips, metadata and chksum for component testing
-void GetURLChksmForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
- char* chksum, size_t streamIndex = 1) {
- struct CompToURL {
- Codec2VideoDecHidlTest::standardComp comp;
- const char mURL[STREAM_COUNT][512];
- const char info[STREAM_COUNT][512];
- const char chksum[STREAM_COUNT][512];
- };
- ASSERT_TRUE(streamIndex < STREAM_COUNT);
-
- static const CompToURL kCompToURL[] = {
- {Codec2VideoDecHidlTest::standardComp::avc,
- {"bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_640x360_768kbps_30fps.h264", ""},
- {"bbb_avc_176x144_300kbps_60fps.info", "bbb_avc_640x360_768kbps_30fps.info", ""},
- {"bbb_avc_176x144_300kbps_60fps_chksum.md5",
- "bbb_avc_640x360_768kbps_30fps_chksum.md5", ""}},
- {Codec2VideoDecHidlTest::standardComp::hevc,
- {"bbb_hevc_176x144_176kbps_60fps.hevc", "bbb_hevc_640x360_1600kbps_30fps.hevc", ""},
- {"bbb_hevc_176x144_176kbps_60fps.info", "bbb_hevc_640x360_1600kbps_30fps.info", ""},
- {"bbb_hevc_176x144_176kbps_60fps_chksum.md5",
- "bbb_hevc_640x360_1600kbps_30fps_chksum.md5", ""}},
- {Codec2VideoDecHidlTest::standardComp::mpeg2,
- {"bbb_mpeg2_176x144_105kbps_25fps.m2v", "bbb_mpeg2_352x288_1mbps_60fps.m2v", ""},
- {"bbb_mpeg2_176x144_105kbps_25fps.info", "bbb_mpeg2_352x288_1mbps_60fps.info", ""},
- {"", "", ""}},
- {Codec2VideoDecHidlTest::standardComp::h263,
- {"", "bbb_h263_352x288_300kbps_12fps.h263", ""},
- {"", "bbb_h263_352x288_300kbps_12fps.info", ""},
- {"", "", ""}},
- {Codec2VideoDecHidlTest::standardComp::mpeg4,
- {"", "bbb_mpeg4_352x288_512kbps_30fps.m4v", ""},
- {"", "bbb_mpeg4_352x288_512kbps_30fps.info", ""},
- {"", "", ""}},
- {Codec2VideoDecHidlTest::standardComp::vp8,
- {"bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", ""},
- {"bbb_vp8_176x144_240kbps_60fps.info", "bbb_vp8_640x360_2mbps_30fps.info", ""},
- {"", "bbb_vp8_640x360_2mbps_30fps_chksm.md5", ""}},
- {Codec2VideoDecHidlTest::standardComp::vp9,
- {"bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
- "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9"},
- {"bbb_vp9_176x144_285kbps_60fps.info", "bbb_vp9_640x360_1600kbps_30fps.info",
- "bbb_vp9_704x480_280kbps_24fps_altref_2.info"},
- {"", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5", ""}},
- {Codec2VideoDecHidlTest::standardComp::av1,
- {"bbb_av1_640_360.av1", "bbb_av1_176_144.av1", ""},
- {"bbb_av1_640_360.info", "bbb_av1_176_144.info", ""},
- {"bbb_av1_640_360_chksum.md5", "bbb_av1_176_144_chksm.md5", ""}},
- };
-
- for (size_t i = 0; i < sizeof(kCompToURL) / sizeof(kCompToURL[0]); ++i) {
- if (kCompToURL[i].comp == comp) {
- strcat(mURL, kCompToURL[i].mURL[streamIndex]);
- strcat(info, kCompToURL[i].info[streamIndex]);
- strcat(chksum, kCompToURL[i].chksum[streamIndex]);
- return;
+void Codec2VideoDecHidlTestBase::GetURLChksmForComponent(char* mURL, char* info, char* chksum,
+ size_t streamIndex) {
+ int streamCount = 0;
+ for (size_t i = 0; i < kCompToURL.size(); ++i) {
+ if (mMime.find(kCompToURL[i].mime) != std::string::npos) {
+ if (streamCount == streamIndex) {
+ strcat(mURL, kCompToURL[i].mURL.c_str());
+ strcat(info, kCompToURL[i].info.c_str());
+ strcat(chksum, kCompToURL[i].chksum.c_str());
+ return;
+ }
+ streamCount++;
}
}
}
-void GetURLForComponent(Codec2VideoDecHidlTest::standardComp comp, char* mURL, char* info,
- size_t streamIndex = 1) {
+void Codec2VideoDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
char chksum[512];
strcpy(chksum, sResourceDir.c_str());
- GetURLChksmForComponent(comp, mURL, info, chksum, streamIndex);
+ GetURLChksmForComponent(mURL, info, chksum, streamIndex);
}
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -517,7 +471,7 @@
TEST_P(Codec2VideoDecHidlTest, validateCompName) {
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ALOGV("Checks if the given component is a valid video component");
- validateComponent(mComponent, mCompName, mDisableTest);
+ validateComponent(mComponent, mDisableTest);
ASSERT_EQ(mDisableTest, false);
}
@@ -573,10 +527,8 @@
return false;
}
-class Codec2VideoDecDecodeTest
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+class Codec2VideoDecDecodeTest : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<DecodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -588,8 +540,8 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::stoi(std::get<2>(GetParam()));
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ uint32_t streamIndex = std::get<2>(GetParam());
+ bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
char mURL[512], info[512], chksum[512];
@@ -599,7 +551,7 @@
strcpy(info, sResourceDir.c_str());
strcpy(chksum, sResourceDir.c_str());
- GetURLChksmForComponent(mCompName, mURL, info, chksum, streamIndex);
+ GetURLChksmForComponent(mURL, info, chksum, streamIndex);
if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
ALOGV("Skipping Test, Stream not available");
return;
@@ -688,9 +640,11 @@
TEST_P(Codec2VideoDecHidlTest, AdaptiveDecodeTest) {
description("Adaptive Decode Test");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- if (!(mCompName == avc || mCompName == hevc || mCompName == vp8 || mCompName == vp9 ||
- mCompName == mpeg2))
+ if (!(strcasestr(mMime.c_str(), "avc") || strcasestr(mMime.c_str(), "hevc") ||
+ strcasestr(mMime.c_str(), "vp8") || strcasestr(mMime.c_str(), "vp9") ||
+ strcasestr(mMime.c_str(), "mpeg2"))) {
return;
+ }
typedef std::unique_lock<std::mutex> ULock;
ASSERT_EQ(mComponent->start(), C2_OK);
@@ -705,7 +659,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info, i % STREAM_COUNT);
+ GetURLForComponent(mURL, info, i % STREAM_COUNT);
if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
ALOGV("Stream not available, skipping this index");
continue;
@@ -801,7 +755,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
@@ -888,7 +842,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
mFlushedIndices.clear();
@@ -964,7 +918,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
eleInfo.open(info);
ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
@@ -1017,9 +971,8 @@
}
}
-class Codec2VideoDecCsdInputTests
- : public Codec2VideoDecHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string, std::string>> {
+class Codec2VideoDecCsdInputTests : public Codec2VideoDecHidlTestBase,
+ public ::testing::WithParamInterface<CsdFlushTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -1038,7 +991,7 @@
strcpy(mURL, sResourceDir.c_str());
strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mCompName, mURL, info);
+ GetURLForComponent(mURL, info);
int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
@@ -1052,7 +1005,7 @@
bool flushedDecoder = false;
bool signalEOS = false;
bool keyFrame = false;
- bool flushCsd = !std::get<2>(GetParam()).compare("true");
+ bool flushCsd = std::get<2>(GetParam());
ALOGV("sending %d csd data ", numCsds);
int framesToDecode = numCsds;
@@ -1122,49 +1075,41 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoDecHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
// DecodeTest with StreamIndex and EOS / No EOS
INSTANTIATE_TEST_SUITE_P(StreamIndexAndEOS, Codec2VideoDecDecodeTest,
testing::ValuesIn(kDecodeTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(CsdInputs, Codec2VideoDecCsdInputTests,
testing::ValuesIn(kCsdFlushTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
} // anonymous namespace
// TODO : Video specific configuration Test
int main(int argc, char** argv) {
+ parseArgs(argc, argv);
kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
for (auto params : kTestParameters) {
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "0", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "false"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
kDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "2", "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "true"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), true));
kCsdFlushTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "false"));
- }
-
- // Set the resource directory based on command line args.
- // Test will fail to set up if the argument is not set.
- for (int i = 1; i < argc; i++) {
- if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
- sResourceDir = argv[i + 1];
- break;
- }
+ std::make_tuple(std::get<0>(params), std::get<1>(params), false));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index 5bcea5b..23ceff4 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -41,13 +41,11 @@
: C2Buffer({block->share(C2Rect(block->width(), block->height()), ::C2Fence())}) {}
};
-static std::vector<std::tuple<std::string, std::string, std::string, std::string, std::string>>
- kEncodeTestParameters;
-static std::vector<std::tuple<std::string, std::string, std::string, std::string>>
- kEncodeResolutionTestParameters;
+using EncodeTestParameters = std::tuple<std::string, std::string, bool, bool, bool>;
+static std::vector<EncodeTestParameters> kEncodeTestParameters;
-// Resource directory
-static std::string sResourceDir = "";
+using EncodeResolutionTestParameters = std::tuple<std::string, std::string, int32_t, int32_t>;
+static std::vector<EncodeResolutionTestParameters> kEncodeResolutionTestParameters;
namespace {
@@ -78,26 +76,13 @@
mGraphicPool = std::make_shared<C2PooledBlockPool>(mGraphicAllocator, mBlockPoolId++);
ASSERT_NE(mGraphicPool, nullptr);
- mCompName = unknown_comp;
- struct StringToName {
- const char* Name;
- standardComp CompName;
- };
+ std::vector<std::unique_ptr<C2Param>> queried;
+ mComponent->query({}, {C2PortMediaTypeSetting::output::PARAM_TYPE}, C2_DONT_BLOCK,
+ &queried);
+ ASSERT_GT(queried.size(), 0);
- const StringToName kStringToName[] = {
- {"h263", h263}, {"avc", avc}, {"mpeg4", mpeg4},
- {"hevc", hevc}, {"vp8", vp8}, {"vp9", vp9},
- };
-
- const size_t kNumStringToName = sizeof(kStringToName) / sizeof(kStringToName[0]);
-
- // Find the component type
- for (size_t i = 0; i < kNumStringToName; ++i) {
- if (strcasestr(mComponentName.c_str(), kStringToName[i].Name)) {
- mCompName = kStringToName[i].CompName;
- break;
- }
- }
+ mMime = ((C2PortMediaTypeSetting::output*)queried[0].get())->m.value;
+ std::cout << "mime : " << mMime << "\n";
mEos = false;
mCsd = false;
mConfigBPictures = false;
@@ -106,11 +91,11 @@
mTimestampUs = 0u;
mOutputSize = 0u;
mTimestampDevTest = false;
- if (mCompName == unknown_comp) mDisableTest = true;
C2SecureModeTuning secureModeTuning{};
mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
- if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+ if (secureModeTuning.value == C2Config::SM_READ_PROTECTED ||
+ secureModeTuning.value == C2Config::SM_READ_PROTECTED_WITH_ENCRYPTED) {
mDisableTest = true;
}
@@ -187,16 +172,7 @@
}
}
- enum standardComp {
- h263,
- avc,
- mpeg4,
- hevc,
- vp8,
- vp9,
- unknown_comp,
- };
-
+ std::string mMime;
std::string mInstanceName;
std::string mComponentName;
bool mEos;
@@ -204,7 +180,6 @@
bool mDisableTest;
bool mConfigBPictures;
bool mTimestampDevTest;
- standardComp mCompName;
uint32_t mFramesReceived;
uint32_t mFailedWorkReceived;
uint64_t mTimestampUs;
@@ -231,9 +206,8 @@
}
};
-class Codec2VideoEncHidlTest
- : public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<std::tuple<std::string, std::string>> {
+class Codec2VideoEncHidlTest : public Codec2VideoEncHidlTestBase,
+ public ::testing::WithParamInterface<TestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -241,7 +215,7 @@
};
void validateComponent(const std::shared_ptr<android::Codec2Client::Component>& component,
- Codec2VideoEncHidlTest::standardComp compName, bool& disableTest) {
+ bool& disableTest) {
// Validate its a C2 Component
if (component->getName().find("c2") == std::string::npos) {
ALOGE("Not a c2 component");
@@ -268,13 +242,6 @@
return;
}
}
-
- // Validates component name
- if (compName == Codec2VideoEncHidlTest::unknown_comp) {
- ALOGE("Component InValid");
- disableTest = true;
- return;
- }
ALOGV("Component Valid");
}
@@ -405,14 +372,12 @@
TEST_P(Codec2VideoEncHidlTest, validateCompName) {
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
ALOGV("Checks if the given component is a valid video component");
- validateComponent(mComponent, mCompName, mDisableTest);
+ validateComponent(mComponent, mDisableTest);
ASSERT_EQ(mDisableTest, false);
}
-class Codec2VideoEncEncodeTest
- : public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string, std::string>> {
+class Codec2VideoEncEncodeTest : public Codec2VideoEncHidlTestBase,
+ public ::testing::WithParamInterface<EncodeTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -426,10 +391,10 @@
char mURL[512];
int32_t nWidth = ENC_DEFAULT_FRAME_WIDTH;
int32_t nHeight = ENC_DEFAULT_FRAME_HEIGHT;
- bool signalEOS = !std::get<2>(GetParam()).compare("true");
+ bool signalEOS = std::get<3>(GetParam());
// Send an empty frame to receive CSD data from encoder.
- bool sendEmptyFirstFrame = !std::get<3>(GetParam()).compare("true");
- mConfigBPictures = !std::get<4>(GetParam()).compare("true");
+ bool sendEmptyFirstFrame = std::get<3>(GetParam());
+ mConfigBPictures = std::get<4>(GetParam());
strcpy(mURL, sResourceDir.c_str());
GetURLForComponent(mURL);
@@ -517,9 +482,9 @@
ASSERT_TRUE(false);
}
- if (mCompName == vp8 || mCompName == h263) {
+ if ((mMime.find("vp8") != std::string::npos) || (mMime.find("3gpp") != std::string::npos)) {
ASSERT_FALSE(mCsd) << "CSD Buffer not expected";
- } else if (mCompName != vp9) {
+ } else if (mMime.find("vp9") == std::string::npos) {
ASSERT_TRUE(mCsd) << "CSD Buffer not received";
}
@@ -697,8 +662,7 @@
class Codec2VideoEncResolutionTest
: public Codec2VideoEncHidlTestBase,
- public ::testing::WithParamInterface<
- std::tuple<std::string, std::string, std::string, std::string>> {
+ public ::testing::WithParamInterface<EncodeResolutionTestParameters> {
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
@@ -710,8 +674,8 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
std::ifstream eleStream;
- int32_t nWidth = std::stoi(std::get<2>(GetParam()));
- int32_t nHeight = std::stoi(std::get<3>(GetParam()));
+ int32_t nWidth = std::get<2>(GetParam());
+ int32_t nHeight = std::get<3>(GetParam());
ALOGD("Trying encode for width %d height %d", nWidth, nHeight);
mEos = false;
@@ -743,14 +707,16 @@
}
INSTANTIATE_TEST_SUITE_P(PerInstance, Codec2VideoEncHidlTest, testing::ValuesIn(kTestParameters),
- android::hardware::PrintInstanceTupleNameToString<>);
+ PrintInstanceTupleNameToString<>);
INSTANTIATE_TEST_SUITE_P(NonStdSizes, Codec2VideoEncResolutionTest,
- ::testing::ValuesIn(kEncodeResolutionTestParameters));
+ ::testing::ValuesIn(kEncodeResolutionTestParameters),
+ PrintInstanceTupleNameToString<>);
// EncodeTest with EOS / No EOS
INSTANTIATE_TEST_SUITE_P(EncodeTestwithEOS, Codec2VideoEncEncodeTest,
- ::testing::ValuesIn(kEncodeTestParameters));
+ ::testing::ValuesIn(kEncodeTestParameters),
+ PrintInstanceTupleNameToString<>);
TEST_P(Codec2VideoEncHidlTest, AdaptiveBitrateTest) {
description("Encodes input file for different bitrates");
@@ -841,38 +807,26 @@
} // anonymous namespace
int main(int argc, char** argv) {
+ parseArgs(argc, argv);
kTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER);
for (auto params : kTestParameters) {
- constexpr char const* kBoolString[] = { "false", "true" };
for (size_t i = 0; i < 1 << 3; ++i) {
kEncodeTestParameters.push_back(std::make_tuple(
- std::get<0>(params), std::get<1>(params),
- kBoolString[i & 1],
- kBoolString[(i >> 1) & 1],
- kBoolString[(i >> 2) & 1]));
+ std::get<0>(params), std::get<1>(params), i & 1, (i >> 1) & 1, (i >> 2) & 1));
}
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "52", "18"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 52, 18));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "365", "365"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 365, 365));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "484", "362"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 484, 362));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "244", "488"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 244, 488));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "852", "608"));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 852, 608));
kEncodeResolutionTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), "1400", "442"));
- }
-
- // Set the resource directory based on command line args.
- // Test will fail to set up if the argument is not set.
- for (int i = 1; i < argc; i++) {
- if (strcmp(argv[i], "-P") == 0 && i < argc - 1) {
- sResourceDir = argv[i + 1];
- break;
- }
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1400, 442));
}
::testing::InitGoogleTest(&argc, argv);
diff --git a/media/codec2/hidl/plugin/FilterWrapper.cpp b/media/codec2/hidl/plugin/FilterWrapper.cpp
index 0b38bc1..bed8aeb 100644
--- a/media/codec2/hidl/plugin/FilterWrapper.cpp
+++ b/media/codec2/hidl/plugin/FilterWrapper.cpp
@@ -19,7 +19,6 @@
#include <android-base/logging.h>
#include <set>
-#include <sstream>
#include <dlfcn.h>
@@ -383,6 +382,9 @@
// Configure the next interface with the params.
std::vector<C2Param *> configParams;
for (size_t i = 0; i < heapParams.size(); ++i) {
+ if (!heapParams[i]) {
+ continue;
+ }
if (heapParams[i]->forStream()) {
heapParams[i] = C2Param::CopyAsStream(
*heapParams[i], false /* output */, heapParams[i]->stream());
@@ -782,10 +784,7 @@
if (C2_OK != mStore->createComponent(filter.traits.name, &comp)) {
return {};
}
- if (C2_OK != mStore->createInterface(filter.traits.name, &intf)) {
- return {};
- }
- filters.push_back({comp, intf, filter.traits, filter.desc});
+ filters.push_back({comp, comp->intf(), filter.traits, filter.desc});
}
return filters;
}
@@ -869,7 +868,7 @@
}
std::vector<Component> filters = createFilters();
std::shared_ptr wrapped = std::make_shared<WrappedDecoder>(
- comp, std::move(filters), weak_from_this());
+ comp, std::vector(filters), weak_from_this());
{
std::unique_lock lock(mWrappedComponentsMutex);
std::vector<std::weak_ptr<const C2Component>> &components =
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index 1a92c08..2460490 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -77,12 +77,10 @@
Mutexed<Jobs>::Locked jobs(mJobs);
ColorUtils::convertDataSpaceToV0(dataspace);
jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
- int32_t standard = (int32_t(dataspace) & HAL_DATASPACE_STANDARD_MASK)
- >> HAL_DATASPACE_STANDARD_SHIFT;
- int32_t transfer = (int32_t(dataspace) & HAL_DATASPACE_TRANSFER_MASK)
- >> HAL_DATASPACE_TRANSFER_SHIFT;
- int32_t range = (int32_t(dataspace) & HAL_DATASPACE_RANGE_MASK)
- >> HAL_DATASPACE_RANGE_SHIFT;
+ int32_t standard;
+ int32_t transfer;
+ int32_t range;
+ ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
std::make_unique<C2StreamColorAspectsInfo::input>(0u);
if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 5d587bc..9c04969 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,9 @@
*/
void onInputBufferDone(c2_cntr64_t index);
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
android_dataspace getDataspace();
private:
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 7b914e4..5c387b3 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -996,7 +996,15 @@
// needed for decoders.
if (!(config->mDomain & Config::IS_ENCODER)) {
if (surface == nullptr) {
- format = flexPixelFormat.value_or(COLOR_FormatYUV420Flexible);
+ const char *prefix = "";
+ if (flexSemiPlanarPixelFormat) {
+ format = COLOR_FormatYUV420SemiPlanar;
+ prefix = "semi-";
+ } else {
+ format = COLOR_FormatYUV420Planar;
+ }
+ ALOGD("Client requested ByteBuffer mode decoder w/o color format set: "
+ "using default %splanar color format", prefix);
} else {
format = COLOR_FormatSurface;
}
@@ -1030,9 +1038,6 @@
}
}
- // get color aspects
- getColorAspectsFromFormat(msg, config->mClientColorAspects);
-
/*
* Handle dataspace
*/
@@ -1042,12 +1047,12 @@
int32_t width, height;
if (msg->findInt32("width", &width)
&& msg->findInt32("height", &height)) {
- setDefaultCodecColorAspectsIfNeeded(config->mClientColorAspects, width, height);
+ ColorAspects aspects;
+ getColorAspectsFromFormat(msg, aspects);
+ setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
// TODO: read dataspace / color aspect from the component
- setColorAspectsIntoFormat(
- config->mClientColorAspects, const_cast<sp<AMessage> &>(msg));
- dataSpace = getDataSpaceForColorAspects(
- config->mClientColorAspects, true /* mayexpand */);
+ setColorAspectsIntoFormat(aspects, const_cast<sp<AMessage> &>(msg));
+ dataSpace = getDataSpaceForColorAspects(aspects, true /* mayexpand */);
}
msg->setInt32("android._dataspace", (int32_t)dataSpace);
ALOGD("setting dataspace to %x", dataSpace);
@@ -1090,6 +1095,45 @@
configUpdate.push_back(std::move(gop));
}
+ if ((config->mDomain & Config::IS_ENCODER)
+ && (config->mDomain & Config::IS_VIDEO)) {
+ // we may not use all 3 of these entries
+ std::unique_ptr<C2StreamPictureQuantizationTuning::output> qp =
+ C2StreamPictureQuantizationTuning::output::AllocUnique(3 /* flexCount */,
+ 0u /* stream */);
+
+ int ix = 0;
+
+ int32_t iMax = INT32_MAX;
+ int32_t iMin = INT32_MIN;
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MAX, &iMax);
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_I_MIN, &iMin);
+ if (iMax != INT32_MAX || iMin != INT32_MIN) {
+ qp->m.values[ix++] = {I_FRAME, iMin, iMax};
+ }
+
+ int32_t pMax = INT32_MAX;
+ int32_t pMin = INT32_MIN;
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MAX, &pMax);
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_P_MIN, &pMin);
+ if (pMax != INT32_MAX || pMin != INT32_MIN) {
+ qp->m.values[ix++] = {P_FRAME, pMin, pMax};
+ }
+
+ int32_t bMax = INT32_MAX;
+ int32_t bMin = INT32_MIN;
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MAX, &bMax);
+ (void) sdkParams->findInt32(KEY_VIDEO_QP_B_MIN, &bMin);
+ if (bMax != INT32_MAX || bMin != INT32_MIN) {
+ qp->m.values[ix++] = {B_FRAME, bMin, bMax};
+ }
+
+ // adjust to reflect actual use.
+ qp->setFlexCount(ix);
+
+ configUpdate.push_back(std::move(qp));
+ }
+
err = config->setParameters(comp, configUpdate, C2_DONT_BLOCK);
if (err != OK) {
ALOGW("failed to configure c2 params");
@@ -1576,6 +1620,7 @@
outputFormat = config->mOutputFormat = config->mOutputFormat->dup();
if (config->mInputSurface) {
err2 = config->mInputSurface->start();
+ config->mInputSurfaceDataspace = config->mInputSurface->getDataspace();
}
buffersBoundToCodec = config->mBuffersBoundToCodec;
}
@@ -1663,6 +1708,7 @@
if (config->mInputSurface) {
config->mInputSurface->disconnect();
config->mInputSurface = nullptr;
+ config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
}
}
{
@@ -1712,6 +1758,7 @@
if (config->mInputSurface) {
config->mInputSurface->disconnect();
config->mInputSurface = nullptr;
+ config->mInputSurfaceDataspace = HAL_DATASPACE_UNKNOWN;
}
}
@@ -1894,6 +1941,12 @@
params->removeEntryAt(params->findEntryByName(KEY_BIT_RATE));
}
+ int32_t syncId = 0;
+ if (params->findInt32("audio-hw-sync", &syncId)
+ || params->findInt32("hw-av-sync-id", &syncId)) {
+ configureTunneledVideoPlayback(comp, nullptr, params);
+ }
+
Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
const std::unique_ptr<Config> &config = *configLocked;
@@ -1965,6 +2018,39 @@
config->setParameters(comp, params, C2_MAY_BLOCK);
}
+status_t CCodec::querySupportedParameters(std::vector<std::string> *names) {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ return config->querySupportedParameters(names);
+}
+
+status_t CCodec::describeParameter(
+ const std::string &name, CodecParameterDescriptor *desc) {
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ return config->describe(name, desc);
+}
+
+status_t CCodec::subscribeToParameters(const std::vector<std::string> &names) {
+ std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+ if (!comp) {
+ return INVALID_OPERATION;
+ }
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ return config->subscribeToVendorConfigUpdate(comp, names);
+}
+
+status_t CCodec::unsubscribeFromParameters(const std::vector<std::string> &names) {
+ std::shared_ptr<Codec2Client::Component> comp = mState.lock()->comp;
+ if (!comp) {
+ return INVALID_OPERATION;
+ }
+ Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+ const std::unique_ptr<Config> &config = *configLocked;
+ return config->unsubscribeFromVendorConfigUpdate(comp, names);
+}
+
void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
if (!workItems.empty()) {
Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -1985,44 +2071,6 @@
}
}
-static void HandleDataspace(
- android_dataspace dataspace, ColorAspects *colorAspects, sp<AMessage> *format) {
- ColorUtils::convertDataSpaceToV0(dataspace);
- int32_t range, standard, transfer;
- range = (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
- if (range == 0) {
- range = ColorUtils::wrapColorAspectsIntoColorRange(
- colorAspects->mRange);
- }
- standard = (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
- if (standard == 0) {
- standard = ColorUtils::wrapColorAspectsIntoColorStandard(
- colorAspects->mPrimaries,
- colorAspects->mMatrixCoeffs);
- }
- transfer = (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
- if (transfer == 0) {
- transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(
- colorAspects->mTransfer);
- }
- ColorAspects newColorAspects;
- ColorUtils::convertPlatformColorAspectsToCodecAspects(
- range, standard, transfer, newColorAspects);
- if (ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
- newColorAspects, *colorAspects)) {
- *format = (*format)->dup();
- (*format)->setInt32(KEY_COLOR_RANGE, range);
- (*format)->setInt32(KEY_COLOR_STANDARD, standard);
- (*format)->setInt32(KEY_COLOR_TRANSFER, transfer);
- // Record current color aspects into |colorAspects|.
- // NOTE: newColorAspects could have been modified by
- // checkIfAspectsChangedAndUnspecifyThem() above,
- // so *colorAspects = newColorAspects does not work as intended.
- ColorUtils::convertPlatformColorAspectsToCodecAspects(
- range, standard, transfer, *colorAspects);
- }
-}
-
void CCodec::onMessageReceived(const sp<AMessage> &msg) {
TimePoint now = std::chrono::steady_clock::now();
CCodecWatchdog::getInstance()->watch(this);
@@ -2137,10 +2185,6 @@
sp<AMessage> outputFormat = config->mOutputFormat;
config->updateConfiguration(updates, config->mOutputDomain);
- if (config->mInputSurface) {
- android_dataspace ds = config->mInputSurface->getDataspace();
- HandleDataspace(ds, &config->mClientColorAspects, &config->mOutputFormat);
- }
RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
// copy standard infos to graphic buffers if not already present (otherwise, we
@@ -2226,6 +2270,10 @@
return UNKNOWN_ERROR;
}
+ if (sidebandHandle == nullptr) {
+ return OK;
+ }
+
std::vector<std::unique_ptr<C2Param>> params;
c2err = comp->query({}, {C2PortTunnelHandleTuning::output::PARAM_TYPE}, C2_DONT_BLOCK, ¶ms);
if (c2err == C2_OK && params.size() == 1u) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index ad02edb..0008172 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1348,7 +1348,7 @@
// about buffers from the previous generation do not interfere with the
// newly initialized pipeline capacity.
- {
+ if (inputFormat || outputFormat) {
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
watcher->inputDelay(inputDelayValue)
.pipelineDelay(pipelineDelayValue)
@@ -1448,14 +1448,14 @@
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
- if (mInputSurface != nullptr) {
- mInputSurface.reset();
- }
- mPipelineWatcher.lock()->flush();
}
void CCodecBufferChannel::reset() {
stop();
+ if (mInputSurface != nullptr) {
+ mInputSurface.reset();
+ }
+ mPipelineWatcher.lock()->flush();
{
Mutexed<Input>::Locked input(mInput);
input->buffers.reset(new DummyInputBuffers(""));
@@ -1483,8 +1483,10 @@
void CCodecBufferChannel::flush(const std::list<std::unique_ptr<C2Work>> &flushedWork) {
ALOGV("[%s] flush", mName);
+ std::vector<uint64_t> indices;
std::list<std::unique_ptr<C2Work>> configs;
for (const std::unique_ptr<C2Work> &work : flushedWork) {
+ indices.push_back(work->input.ordinal.frameIndex.peeku());
if (!(work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) {
continue;
}
@@ -1497,6 +1499,7 @@
std::unique_ptr<C2Work> copy(new C2Work);
copy->input.flags = C2FrameData::flags_t(work->input.flags | C2FrameData::FLAG_DROP_FRAME);
copy->input.ordinal = work->input.ordinal;
+ copy->input.ordinal.frameIndex = mFrameIndex++;
copy->input.buffers.insert(
copy->input.buffers.begin(),
work->input.buffers.begin(),
@@ -1525,7 +1528,12 @@
output->buffers->flushStash();
}
}
- mPipelineWatcher.lock()->flush();
+ {
+ Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+ for (uint64_t index : indices) {
+ watcher->onWorkDone(index);
+ }
+ }
}
void CCodecBufferChannel::onWorkDone(
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index 6825dc2..ba44074 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -77,34 +77,39 @@
void CCodecBuffers::handleImageData(const sp<Codec2Buffer> &buffer) {
sp<ABuffer> imageDataCandidate = buffer->getImageData();
if (imageDataCandidate == nullptr) {
+ if (mFormatWithImageData) {
+ // We previously sent the format with image data, so use the same format.
+ buffer->setFormat(mFormatWithImageData);
+ }
return;
}
- sp<ABuffer> imageData;
- if (!mFormat->findBuffer("image-data", &imageData)
- || imageDataCandidate->size() != imageData->size()
- || memcmp(imageDataCandidate->data(), imageData->data(), imageData->size()) != 0) {
+ if (!mLastImageData
+ || imageDataCandidate->size() != mLastImageData->size()
+ || memcmp(imageDataCandidate->data(),
+ mLastImageData->data(),
+ mLastImageData->size()) != 0) {
ALOGD("[%s] updating image-data", mName);
- sp<AMessage> newFormat = dupFormat();
- newFormat->setBuffer("image-data", imageDataCandidate);
+ mFormatWithImageData = dupFormat();
+ mLastImageData = imageDataCandidate;
+ mFormatWithImageData->setBuffer("image-data", imageDataCandidate);
MediaImage2 *img = (MediaImage2*)imageDataCandidate->data();
if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
int32_t stride = img->mPlane[0].mRowInc;
- newFormat->setInt32(KEY_STRIDE, stride);
+ mFormatWithImageData->setInt32(KEY_STRIDE, stride);
ALOGD("[%s] updating stride = %d", mName, stride);
if (img->mNumPlanes > 1 && stride > 0) {
int64_t offsetDelta =
(int64_t)img->mPlane[1].mOffset - (int64_t)img->mPlane[0].mOffset;
int32_t vstride = int32_t(offsetDelta / stride);
- newFormat->setInt32(KEY_SLICE_HEIGHT, vstride);
+ mFormatWithImageData->setInt32(KEY_SLICE_HEIGHT, vstride);
ALOGD("[%s] updating vstride = %d", mName, vstride);
buffer->setRange(
img->mPlane[0].mOffset,
buffer->size() - img->mPlane[0].mOffset);
}
}
- setFormat(newFormat);
- buffer->setFormat(newFormat);
}
+ buffer->setFormat(mFormatWithImageData);
}
// InputBuffers
@@ -273,22 +278,12 @@
if (entry.notify && mFormat != outputFormat) {
updateSkipCutBuffer(outputFormat);
- sp<ABuffer> imageData;
- if (mFormat->findBuffer("image-data", &imageData)) {
- outputFormat->setBuffer("image-data", imageData);
- }
- int32_t stride;
- if (mFormat->findInt32(KEY_STRIDE, &stride)) {
- outputFormat->setInt32(KEY_STRIDE, stride);
- }
- int32_t sliceHeight;
- if (mFormat->findInt32(KEY_SLICE_HEIGHT, &sliceHeight)) {
- outputFormat->setInt32(KEY_SLICE_HEIGHT, sliceHeight);
- }
+ // Trigger image data processing to the new format
+ mLastImageData.clear();
ALOGV("[%s] popFromStashAndRegister: output format reference changed: %p -> %p",
mName, mFormat.get(), outputFormat.get());
- ALOGD("[%s] popFromStashAndRegister: output format changed to %s",
- mName, outputFormat->debugString().c_str());
+ ALOGD("[%s] popFromStashAndRegister: at %lldus, output format changed to %s",
+ mName, (long long)entry.timestamp, outputFormat->debugString().c_str());
setFormat(outputFormat);
}
diff --git a/media/codec2/sfplugin/CCodecBuffers.h b/media/codec2/sfplugin/CCodecBuffers.h
index 7c4e7b1..995d3a4 100644
--- a/media/codec2/sfplugin/CCodecBuffers.h
+++ b/media/codec2/sfplugin/CCodecBuffers.h
@@ -86,6 +86,9 @@
// Format to be used for creating MediaCodec-facing buffers.
sp<AMessage> mFormat;
+ sp<ABuffer> mLastImageData;
+ sp<AMessage> mFormatWithImageData;
+
private:
DISALLOW_EVIL_CONSTRUCTORS(CCodecBuffers);
};
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index f5cc98e..727b1ff 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -24,6 +24,7 @@
#include <C2Param.h>
#include <util/C2InterfaceHelper.h>
+#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
#include "CCodecConfig.h"
@@ -290,8 +291,8 @@
std::vector<std::string> getPathsForDomain(
Domain any, Domain all = Domain::ALL) const {
std::vector<std::string> res;
- for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mConfigMappers) {
- for (const ConfigMapper &cm : el.second) {
+ for (const auto &[key, mappers] : mConfigMappers) {
+ for (const ConfigMapper &cm : mappers) {
ALOGV("filtering %s %x %x %x %x", cm.path().c_str(), cm.domain(), any,
(cm.domain() & any), (cm.domain() & any & all));
if ((cm.domain() & any) && ((cm.domain() & any & all) == (any & all))) {
@@ -361,7 +362,10 @@
.limitTo(D::OUTPUT & D::READ));
add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
- .limitTo(D::ENCODER & D::OUTPUT));
+ .limitTo(D::ENCODER & D::CODED));
+ // Some audio decoders require bitrate information to be set
+ add(ConfigMapper(KEY_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
+ .limitTo(D::AUDIO & D::DECODER & D::CODED));
// we also need to put the bitrate in the max bitrate field
add(ConfigMapper(KEY_MAX_BIT_RATE, C2_PARAMKEY_BITRATE, "value")
.limitTo(D::ENCODER & D::READ & D::OUTPUT));
@@ -729,6 +733,17 @@
return C2Value();
}));
+ add(ConfigMapper(KEY_AAC_PROFILE, C2_PARAMKEY_PROFILE_LEVEL, "profile")
+ .limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM))
+ .withMapper([mapper](C2Value v) -> C2Value {
+ C2Config::profile_t c2 = PROFILE_UNUSED;
+ int32_t sdk;
+ if (mapper && v.get(&sdk) && mapper->mapProfile(sdk, &c2)) {
+ return c2;
+ }
+ return PROFILE_UNUSED;
+ }));
+
// convert to dBFS and add default
add(ConfigMapper(KEY_AAC_DRC_TARGET_REFERENCE_LEVEL, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL, "value")
.limitTo(D::AUDIO & D::DECODER & (D::CONFIG | D::PARAM | D::READ))
@@ -1061,7 +1076,7 @@
std::vector<std::string> keys;
mParamUpdater->getKeysForParamIndex(desc->index(), &keys);
for (const std::string &key : keys) {
- mVendorParamIndices.insert_or_assign(key, desc->index());
+ mVendorParams.insert_or_assign(key, desc);
}
}
}
@@ -1128,6 +1143,12 @@
insertion.first->second = std::move(p);
}
}
+ if (mInputSurface
+ && (domain & mOutputDomain)
+ && mInputSurfaceDataspace != mInputSurface->getDataspace()) {
+ changed = true;
+ mInputSurfaceDataspace = mInputSurface->getDataspace();
+ }
ALOGV("updated configuration has %zu params (%s)", mCurrentConfig.size(),
changed ? "CHANGED" : "no change");
@@ -1193,8 +1214,8 @@
const ReflectedParamUpdater::Dict &reflected,
Domain portDomain) const {
sp<AMessage> msg = new AMessage;
- for (const std::pair<std::string, std::vector<ConfigMapper>> &el : mStandardParams->getKeys()) {
- for (const ConfigMapper &cm : el.second) {
+ for (const auto &[key, mappers] : mStandardParams->getKeys()) {
+ for (const ConfigMapper &cm : mappers) {
if ((cm.domain() & portDomain) == 0 // input-output-coded-raw
|| (cm.domain() & mDomain) != mDomain // component domain + kind (these must match)
|| (cm.domain() & IS_READ) == 0) {
@@ -1218,26 +1239,26 @@
ALOGD("unexpected untyped query value for key: %s", cm.path().c_str());
continue;
}
- msg->setItem(el.first.c_str(), item);
+ msg->setItem(key.c_str(), item);
}
}
bool input = (portDomain & Domain::IS_INPUT);
std::vector<std::string> vendorKeys;
- for (const std::pair<std::string, ReflectedParamUpdater::Value> &entry : reflected) {
- auto it = mVendorParamIndices.find(entry.first);
- if (it == mVendorParamIndices.end()) {
+ for (const auto &[key, value] : reflected) {
+ auto it = mVendorParams.find(key);
+ if (it == mVendorParams.end()) {
continue;
}
- if (mSubscribedIndices.count(it->second) == 0) {
+ C2Param::Index index = it->second->index();
+ if (mSubscribedIndices.count(index) == 0) {
continue;
}
// For vendor parameters, we only care about direction
- if ((input && !it->second.forInput())
- || (!input && !it->second.forOutput())) {
+ if ((input && !index.forInput())
+ || (!input && !index.forOutput())) {
continue;
}
- const ReflectedParamUpdater::Value &value = entry.second;
C2Value c2Value;
sp<ABuffer> bufValue;
AString strValue;
@@ -1249,10 +1270,10 @@
} else if (value.find(&strValue)) {
item.set(strValue);
} else {
- ALOGD("unexpected untyped query value for key: %s", entry.first.c_str());
+ ALOGD("unexpected untyped query value for key: %s", key.c_str());
continue;
}
- msg->setItem(entry.first.c_str(), item);
+ msg->setItem(key.c_str(), item);
}
{ // convert from Codec 2.0 rect to MediaFormat rect and add crop rect if not present
@@ -1312,6 +1333,14 @@
}
}
+ // Remove KEY_AAC_SBR_MODE from SDK message if it is outside supported range
+ // as SDK doesn't have a way to signal default sbr mode based on profile and
+ // requires that the key isn't present in format to signal that
+ int sbrMode;
+ if (msg->findInt32(KEY_AAC_SBR_MODE, &sbrMode) && (sbrMode < 0 || sbrMode > 2)) {
+ msg->removeEntryAt(msg->findEntryByName(KEY_AAC_SBR_MODE));
+ }
+
{ // convert color info
// move default color to color aspect if not read from the component
int32_t tmp;
@@ -1356,7 +1385,6 @@
msg->removeEntryAt(msg->findEntryByName("color-matrix"));
}
-
// calculate dataspace for raw graphic buffers if not specified by component, or if
// using surface with unspecified aspects (as those must be defaulted which may change
// the dataspace)
@@ -1394,6 +1422,23 @@
}
}
+ if (mInputSurface) {
+ android_dataspace dataspace = mInputSurface->getDataspace();
+ ColorUtils::convertDataSpaceToV0(dataspace);
+ int32_t standard;
+ ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+ if (range != 0) {
+ msg->setInt32(KEY_COLOR_RANGE, range);
+ }
+ if (standard != 0) {
+ msg->setInt32(KEY_COLOR_STANDARD, standard);
+ }
+ if (transfer != 0) {
+ msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+ }
+ msg->setInt32("android._dataspace", dataspace);
+ }
+
// HDR static info
C2HdrStaticMetadataStruct hdr;
@@ -1811,8 +1856,81 @@
status_t CCodecConfig::subscribeToAllVendorParams(
const std::shared_ptr<Codec2Client::Configurable> &configurable,
c2_blocking_t blocking) {
- for (const std::pair<std::string, C2Param::Index> &entry : mVendorParamIndices) {
- mSubscribedIndices.insert(entry.second);
+ for (const auto &[path, desc] : mVendorParams) {
+ mSubscribedIndices.insert(desc->index());
+ }
+ return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::querySupportedParameters(std::vector<std::string> *names) {
+ if (!names) {
+ return BAD_VALUE;
+ }
+ names->clear();
+ // TODO: expand to standard params
+ for (const auto &[key, desc] : mVendorParams) {
+ names->push_back(key);
+ }
+ return OK;
+}
+
+status_t CCodecConfig::describe(const std::string &name, CodecParameterDescriptor *desc) {
+ if (!desc) {
+ return BAD_VALUE;
+ }
+ // TODO: expand to standard params
+ desc->name = name;
+ switch (mParamUpdater->getTypeForKey(name)) {
+ case C2FieldDescriptor::INT32:
+ case C2FieldDescriptor::UINT32:
+ case C2FieldDescriptor::CNTR32:
+ desc->type = AMessage::kTypeInt32;
+ return OK;
+ case C2FieldDescriptor::INT64:
+ case C2FieldDescriptor::UINT64:
+ case C2FieldDescriptor::CNTR64:
+ desc->type = AMessage::kTypeInt64;
+ return OK;
+ case C2FieldDescriptor::FLOAT:
+ desc->type = AMessage::kTypeFloat;
+ return OK;
+ case C2FieldDescriptor::STRING:
+ desc->type = AMessage::kTypeString;
+ return OK;
+ case C2FieldDescriptor::BLOB:
+ desc->type = AMessage::kTypeBuffer;
+ return OK;
+ default:
+ return NAME_NOT_FOUND;
+ }
+}
+
+status_t CCodecConfig::subscribeToVendorConfigUpdate(
+ const std::shared_ptr<Codec2Client::Configurable> &configurable,
+ const std::vector<std::string> &names,
+ c2_blocking_t blocking) {
+ for (const std::string &name : names) {
+ auto it = mVendorParams.find(name);
+ if (it == mVendorParams.end()) {
+ ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+ continue;
+ }
+ mSubscribedIndices.insert(it->second->index());
+ }
+ return subscribeToConfigUpdate(configurable, {}, blocking);
+}
+
+status_t CCodecConfig::unsubscribeFromVendorConfigUpdate(
+ const std::shared_ptr<Codec2Client::Configurable> &configurable,
+ const std::vector<std::string> &names,
+ c2_blocking_t blocking) {
+ for (const std::string &name : names) {
+ auto it = mVendorParams.find(name);
+ if (it == mVendorParams.end()) {
+ ALOGD("%s is not a recognized vendor parameter; ignored.", name.c_str());
+ continue;
+ }
+ mSubscribedIndices.erase(it->second->index());
}
return subscribeToConfigUpdate(configurable, {}, blocking);
}
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index d9116f7..417b773 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -27,7 +27,6 @@
#include <C2Debug.h>
#include <codec2/hidl/client.h>
-#include <media/stagefright/foundation/ColorUtils.h>
#include <utils/RefBase.h>
#include "InputSurfaceWrapper.h"
@@ -36,6 +35,7 @@
namespace android {
struct AMessage;
+struct CodecParameterDescriptor;
class NativeHandle;
struct StandardParams;
@@ -125,7 +125,7 @@
std::shared_ptr<InputSurfaceWrapper> mInputSurface;
std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
- ColorAspects mClientColorAspects;
+ android_dataspace mInputSurfaceDataspace;
/// the current configuration. Updated after configure() and based on configUpdate in
/// onWorkDone
@@ -139,8 +139,8 @@
/// For now support a validation function.
std::map<C2Param::Index, LocalParamValidator> mLocalParams;
- /// Vendor field name -> index map.
- std::map<std::string, C2Param::Index> mVendorParamIndices;
+ /// Vendor field name -> desc map.
+ std::map<std::string, std::shared_ptr<C2ParamDescriptor>> mVendorParams;
std::set<std::string> mLastConfig;
@@ -328,6 +328,41 @@
return Watcher<T>(index, this);
}
+ /**
+ * Queries supported parameters and put the keys to |names|.
+ * TODO: currently this method queries vendor parameter keys only.
+ *
+ * \return OK if successful.
+ * BAD_VALUE if |names| is nullptr.
+ */
+ status_t querySupportedParameters(std::vector<std::string> *names);
+
+ /**
+ * Describe the parameter with |name|, filling the information into |desc|
+ * TODO: currently this method works only for vendor parameters.
+ *
+ * \return OK if successful.
+ * BAD_VALUE if |desc| is nullptr.
+ * NAME_NOT_FOUND if |name| is not a recognized parameter name.
+ */
+ status_t describe(const std::string &name, CodecParameterDescriptor *desc);
+
+ /**
+ * Find corresponding indices for |names| and subscribe to them.
+ */
+ status_t subscribeToVendorConfigUpdate(
+ const std::shared_ptr<Codec2Client::Configurable> &configurable,
+ const std::vector<std::string> &names,
+ c2_blocking_t blocking = C2_DONT_BLOCK);
+
+ /**
+ * Find corresponding indices for |names| and unsubscribe from them.
+ */
+ status_t unsubscribeFromVendorConfigUpdate(
+ const std::shared_ptr<Codec2Client::Configurable> &configurable,
+ const std::vector<std::string> &names,
+ c2_blocking_t blocking = C2_DONT_BLOCK);
+
private:
/// initializes the standard MediaCodec to Codec 2.0 params mapping
diff --git a/media/codec2/sfplugin/FrameReassembler.cpp b/media/codec2/sfplugin/FrameReassembler.cpp
index f8e6937..cf1be17 100644
--- a/media/codec2/sfplugin/FrameReassembler.cpp
+++ b/media/codec2/sfplugin/FrameReassembler.cpp
@@ -143,6 +143,7 @@
if (buffer->size() > 0) {
mCurrentOrdinal.timestamp = timeUs;
+ mCurrentOrdinal.customOrdinal = timeUs;
}
size_t frameSizeBytes = mFrameSize.value() * mChannelCount * bytesPerSample();
@@ -219,6 +220,7 @@
++mCurrentOrdinal.frameIndex;
mCurrentOrdinal.timestamp += mFrameSize.value() * 1000000 / mSampleRate;
+ mCurrentOrdinal.customOrdinal = mCurrentOrdinal.timestamp;
mCurrentBlock.reset();
mWriteView.reset();
}
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index bb7ca02..3ddae01 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -106,6 +106,9 @@
*/
virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
virtual android_dataspace getDataspace() { return mDataSpace; }
protected:
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
index 0ee9056..bc9197c 100644
--- a/media/codec2/sfplugin/PipelineWatcher.cpp
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -95,6 +95,7 @@
}
void PipelineWatcher::flush() {
+ ALOGV("flush");
mFramesInPipeline.clear();
}
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.cpp b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
index f39051b..d14b9b0 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.cpp
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.cpp
@@ -288,6 +288,20 @@
}
}
+C2FieldDescriptor::type_t ReflectedParamUpdater::getTypeForKey(
+ const std::string &key) const {
+ auto it = mMap.find(key);
+ if (it == mMap.end()) {
+ return C2FieldDescriptor::type_t(~0);
+ }
+
+ if (it->second.fieldDesc) {
+ return it->second.fieldDesc->type();
+ }
+ // whole param is exposed as a blob
+ return C2FieldDescriptor::BLOB;
+}
+
void ReflectedParamUpdater::updateParamsFromMessage(
const Dict ¶ms,
std::vector<std::unique_ptr<C2Param>> *vec /* nonnull */) const {
diff --git a/media/codec2/sfplugin/ReflectedParamUpdater.h b/media/codec2/sfplugin/ReflectedParamUpdater.h
index 752c7e4..6dcf2a3 100644
--- a/media/codec2/sfplugin/ReflectedParamUpdater.h
+++ b/media/codec2/sfplugin/ReflectedParamUpdater.h
@@ -176,6 +176,14 @@
std::vector<std::string> *keys /* nonnull */) const;
/**
+ * Get field type for the given name
+ *
+ * \param key[in] field name
+ * \return type of the field, or type_t(~0) if not found.
+ */
+ C2FieldDescriptor::type_t getTypeForKey(const std::string &name) const;
+
+ /**
* Update C2Param objects from field name and value in AMessage object.
*
* \param params[in] Dict object with field name to value pairs.
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index ba69d7e..ec18128 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -65,6 +65,12 @@
virtual void signalEndOfInputStream() override;
virtual void signalRequestIDRFrame() override;
+ virtual status_t querySupportedParameters(std::vector<std::string> *names) override;
+ virtual status_t describeParameter(
+ const std::string &name, CodecParameterDescriptor *desc) override;
+ virtual status_t subscribeToParameters(const std::vector<std::string> &names) override;
+ virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names) override;
+
void initiateReleaseIfStuck();
void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index c9caa01..7c660dc 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -208,6 +208,24 @@
.withSetter(Setter<C2StreamPixelAspectRatioInfo::output>)
.build());
+ if (isEncoder) {
+ addParameter(
+ DefineParam(mInputBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::input(0u))
+ .withFields({C2F(mInputBitrate, value).any()})
+ .withSetter(Setter<C2StreamBitrateInfo::input>)
+ .build());
+
+ addParameter(
+ DefineParam(mOutputBitrate, C2_PARAMKEY_BITRATE)
+ .withDefault(new C2StreamBitrateInfo::output(0u))
+ .withFields({C2F(mOutputBitrate, value).any()})
+ .calculatedAs(
+ Copy<C2StreamBitrateInfo::output, C2StreamBitrateInfo::input>,
+ mInputBitrate)
+ .build());
+ }
+
// TODO: more SDK params
}
private:
@@ -221,11 +239,19 @@
std::shared_ptr<C2StreamVendorInt64Info::output> mInt64Output;
std::shared_ptr<C2PortVendorStringInfo::input> mStringInput;
std::shared_ptr<C2StreamPixelAspectRatioInfo::output> mPixelAspectRatio;
+ std::shared_ptr<C2StreamBitrateInfo::input> mInputBitrate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
template<typename T>
static C2R Setter(bool, C2P<T> &) {
return C2R::Ok();
}
+
+ template<typename ME, typename DEP>
+ static C2R Copy(bool, C2P<ME> &me, const C2P<DEP> &dep) {
+ me.set().value = dep.v.value;
+ return C2R::Ok();
+ }
};
Impl mImpl;
@@ -457,4 +483,97 @@
<< "mInputFormat = " << mConfig.mInputFormat->debugString().c_str();
}
+TEST_F(CCodecConfigTest, DataspaceUpdate) {
+ init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_AVC);
+
+ ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+ class InputSurfaceStub : public InputSurfaceWrapper {
+ public:
+ ~InputSurfaceStub() override = default;
+ status_t connect(const std::shared_ptr<Codec2Client::Component> &) override {
+ return OK;
+ }
+ void disconnect() override {}
+ status_t start() override { return OK; }
+ status_t signalEndOfInputStream() override { return OK; }
+ status_t configure(Config &) override { return OK; }
+ };
+ mConfig.mInputSurface = std::make_shared<InputSurfaceStub>();
+
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_COLOR_RANGE, COLOR_RANGE_LIMITED);
+ format->setInt32(KEY_COLOR_STANDARD, COLOR_STANDARD_BT709);
+ format->setInt32(KEY_COLOR_TRANSFER, COLOR_TRANSFER_SDR_VIDEO);
+ format->setInt32(KEY_BIT_RATE, 100);
+
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+ mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+ ASSERT_TRUE(mConfig.updateConfiguration(configUpdate, D::ALL));
+
+ int32_t range{0};
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_RANGE_LIMITED, range)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ int32_t standard{0};
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_STANDARD_BT709, standard)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ int32_t transfer{0};
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_TRANSFER_SDR_VIDEO, transfer)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ mConfig.mInputSurface->setDataSpace(HAL_DATASPACE_BT2020_PQ);
+
+ // Dataspace from input surface should override the configured setting
+ mConfig.updateFormats(D::ALL);
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_RANGE_FULL, range)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ // Simulate bitrate update
+ format = new AMessage;
+ format->setInt32(KEY_BIT_RATE, 200);
+ configUpdate.clear();
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(
+ mConfigurable, format, D::ALL, C2_MAY_BLOCK, &configUpdate));
+ ASSERT_EQ(OK, mConfig.setParameters(mConfigurable, configUpdate, C2_MAY_BLOCK));
+
+ // Color information should remain the same
+ mConfig.updateFormats(D::ALL);
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_RANGE, &range))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_RANGE_FULL, range)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_STANDARD, &standard))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_STANDARD_BT2020, standard)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+
+ ASSERT_TRUE(mConfig.mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &transfer))
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+ EXPECT_EQ(COLOR_TRANSFER_ST2084, transfer)
+ << "mOutputFormat = " << mConfig.mOutputFormat->debugString().c_str();
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 74e7ef1..2f4d6b1 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -33,11 +33,13 @@
"libcodec2_vndk",
"libcutils",
"liblog",
+ "libnativewindow",
"libstagefright_foundation",
"libutils",
],
static_libs: [
+ "libarect",
"libyuv_static",
],
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bf2a07e..a78d811 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -23,6 +23,7 @@
#include <list>
#include <mutex>
+#include <android/hardware_buffer.h>
#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/AUtils.h>
@@ -121,32 +122,69 @@
if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
return BAD_VALUE;
}
- if ((IsNV12(view) && IsI420(img)) || (IsI420(view) && IsNV12(img))) {
- // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
- const uint8_t* src_y = view.data()[0];
- const uint8_t* src_u = view.data()[1];
- const uint8_t* src_v = view.data()[2];
- int32_t src_stride_y = view.layout().planes[0].rowInc;
- int32_t src_stride_u = view.layout().planes[1].rowInc;
- int32_t src_stride_v = view.layout().planes[2].rowInc;
- uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
- uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
- uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
- int32_t dst_stride_y = img->mPlane[0].mRowInc;
- int32_t dst_stride_u = img->mPlane[1].mRowInc;
- int32_t dst_stride_v = img->mPlane[2].mRowInc;
- if (IsNV12(view) && IsI420(img)) {
+ const uint8_t* src_y = view.data()[0];
+ const uint8_t* src_u = view.data()[1];
+ const uint8_t* src_v = view.data()[2];
+ int32_t src_stride_y = view.layout().planes[0].rowInc;
+ int32_t src_stride_u = view.layout().planes[1].rowInc;
+ int32_t src_stride_v = view.layout().planes[2].rowInc;
+ uint8_t* dst_y = imgBase + img->mPlane[0].mOffset;
+ uint8_t* dst_u = imgBase + img->mPlane[1].mOffset;
+ uint8_t* dst_v = imgBase + img->mPlane[2].mOffset;
+ int32_t dst_stride_y = img->mPlane[0].mRowInc;
+ int32_t dst_stride_u = img->mPlane[1].mRowInc;
+ int32_t dst_stride_v = img->mPlane[2].mRowInc;
+ int width = view.crop().width;
+ int height = view.crop().height;
+
+ if (IsNV12(view)) {
+ if (IsNV12(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+ return OK;
+ } else if (IsNV21(img)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(img)) {
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
- dst_u, dst_stride_u, dst_v, dst_stride_v, view.crop().width,
- view.crop().height)) {
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
}
- } else {
+ }
+ } else if (IsNV21(view)) {
+ if (IsNV12(img)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+ return OK;
+ }
+ } else if (IsNV21(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+ return OK;
+ } else if (IsI420(img)) {
+ if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ }
+ } else if (IsI420(view)) {
+ if (IsNV12(img)) {
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
- dst_y, dst_stride_y, dst_u, dst_stride_u, view.crop().width,
- view.crop().height)) {
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
+ } else if (IsNV21(img)) {
+ if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(img)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+ return OK;
}
}
return _ImageCopy<true>(view, img, imgBase);
@@ -156,32 +194,68 @@
if (view.crop().width != img->mWidth || view.crop().height != img->mHeight) {
return BAD_VALUE;
}
- if ((IsNV12(img) && IsI420(view)) || (IsI420(img) && IsNV12(view))) {
- // Take shortcuts to use libyuv functions between NV12 and I420 conversion.
- const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
- const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
- const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
- int32_t src_stride_y = img->mPlane[0].mRowInc;
- int32_t src_stride_u = img->mPlane[1].mRowInc;
- int32_t src_stride_v = img->mPlane[2].mRowInc;
- uint8_t* dst_y = view.data()[0];
- uint8_t* dst_u = view.data()[1];
- uint8_t* dst_v = view.data()[2];
- int32_t dst_stride_y = view.layout().planes[0].rowInc;
- int32_t dst_stride_u = view.layout().planes[1].rowInc;
- int32_t dst_stride_v = view.layout().planes[2].rowInc;
- if (IsNV12(img) && IsI420(view)) {
+ const uint8_t* src_y = imgBase + img->mPlane[0].mOffset;
+ const uint8_t* src_u = imgBase + img->mPlane[1].mOffset;
+ const uint8_t* src_v = imgBase + img->mPlane[2].mOffset;
+ int32_t src_stride_y = img->mPlane[0].mRowInc;
+ int32_t src_stride_u = img->mPlane[1].mRowInc;
+ int32_t src_stride_v = img->mPlane[2].mRowInc;
+ uint8_t* dst_y = view.data()[0];
+ uint8_t* dst_u = view.data()[1];
+ uint8_t* dst_v = view.data()[2];
+ int32_t dst_stride_y = view.layout().planes[0].rowInc;
+ int32_t dst_stride_u = view.layout().planes[1].rowInc;
+ int32_t dst_stride_v = view.layout().planes[2].rowInc;
+ int width = view.crop().width;
+ int height = view.crop().height;
+ if (IsNV12(img)) {
+ if (IsNV12(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
+ return OK;
+ } else if (IsNV21(view)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(view)) {
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
- dst_u, dst_stride_u, dst_v, dst_stride_v, view.width(),
- view.height())) {
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
}
- } else {
+ }
+ } else if (IsNV21(img)) {
+ if (IsNV12(view)) {
+ if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
+ return OK;
+ }
+ } else if (IsNV21(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
+ return OK;
+ } else if (IsI420(view)) {
+ if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
+ dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ }
+ } else if (IsI420(img)) {
+ if (IsNV12(view)) {
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
- dst_y, dst_stride_y, dst_u, dst_stride_u, view.width(),
- view.height())) {
+ dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
+ } else if (IsNV21(view)) {
+ if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
+ return OK;
+ }
+ } else if (IsI420(view)) {
+ libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
+ libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
+ libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
+ return OK;
}
}
return _ImageCopy<false>(view, img, imgBase);
@@ -225,6 +299,20 @@
&& layout.planes[layout.PLANE_V].offset == 1);
}
+bool IsNV21(const C2GraphicView &view) {
+ if (!IsYUV420(view)) {
+ return false;
+ }
+ const C2PlanarLayout &layout = view.layout();
+ return (layout.rootPlanes == 2
+ && layout.planes[layout.PLANE_U].colInc == 2
+ && layout.planes[layout.PLANE_U].rootIx == layout.PLANE_V
+ && layout.planes[layout.PLANE_U].offset == 1
+ && layout.planes[layout.PLANE_V].colInc == 2
+ && layout.planes[layout.PLANE_V].rootIx == layout.PLANE_V
+ && layout.planes[layout.PLANE_V].offset == 0);
+}
+
bool IsI420(const C2GraphicView &view) {
if (!IsYUV420(view)) {
return false;
@@ -261,6 +349,15 @@
&& (img->mPlane[2].mOffset - img->mPlane[1].mOffset == 1));
}
+bool IsNV21(const MediaImage2 *img) {
+ if (!IsYUV420(img)) {
+ return false;
+ }
+ return (img->mPlane[1].mColInc == 2
+ && img->mPlane[2].mColInc == 2
+ && (img->mPlane[1].mOffset - img->mPlane[2].mOffset == 1));
+}
+
bool IsI420(const MediaImage2 *img) {
if (!IsYUV420(img)) {
return false;
@@ -270,6 +367,76 @@
&& img->mPlane[2].mOffset > img->mPlane[1].mOffset);
}
+FlexLayout GetYuv420FlexibleLayout() {
+ static FlexLayout sLayout = []{
+ AHardwareBuffer_Desc desc = {
+ 16, // width
+ 16, // height
+ 1, // layers
+ AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420,
+ AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ 0, // stride
+ 0, // rfu0
+ 0, // rfu1
+ };
+ AHardwareBuffer *buffer = nullptr;
+ int ret = AHardwareBuffer_allocate(&desc, &buffer);
+ if (ret != 0) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ class AutoCloser {
+ public:
+ AutoCloser(AHardwareBuffer *buffer) : mBuffer(buffer), mLocked(false) {}
+ ~AutoCloser() {
+ if (mLocked) {
+ AHardwareBuffer_unlock(mBuffer, nullptr);
+ }
+ AHardwareBuffer_release(mBuffer);
+ }
+
+ void setLocked() { mLocked = true; }
+
+ private:
+ AHardwareBuffer *mBuffer;
+ bool mLocked;
+ } autoCloser(buffer);
+ AHardwareBuffer_Planes planes;
+ ret = AHardwareBuffer_lockPlanes(
+ buffer,
+ AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
+ -1, // fence
+ nullptr, // rect
+ &planes);
+ if (ret != 0) {
+ AHardwareBuffer_release(buffer);
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ autoCloser.setLocked();
+ if (planes.planeCount != 3) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ if (planes.planes[0].pixelStride != 1) {
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ if (planes.planes[1].pixelStride == 1 && planes.planes[2].pixelStride == 1) {
+ return FLEX_LAYOUT_PLANAR;
+ }
+ if (planes.planes[1].pixelStride == 2 && planes.planes[2].pixelStride == 2) {
+ ssize_t uvDist =
+ static_cast<uint8_t *>(planes.planes[2].data) -
+ static_cast<uint8_t *>(planes.planes[1].data);
+ if (uvDist == 1) {
+ return FLEX_LAYOUT_SEMIPLANAR_UV;
+ } else if (uvDist == -1) {
+ return FLEX_LAYOUT_SEMIPLANAR_VU;
+ }
+ return FLEX_LAYOUT_UNKNOWN;
+ }
+ return FLEX_LAYOUT_UNKNOWN;
+ }();
+ return sLayout;
+}
+
MediaImage2 CreateYUV420PlanarMediaImage2(
uint32_t width, uint32_t height, uint32_t stride, uint32_t vstride) {
return MediaImage2 {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index afadf00..af29e81 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -96,6 +96,11 @@
bool IsNV12(const C2GraphicView &view);
/**
+ * Returns true iff a view has a NV21 layout.
+ */
+bool IsNV21(const C2GraphicView &view);
+
+/**
* Returns true iff a view has a I420 layout.
*/
bool IsI420(const C2GraphicView &view);
@@ -111,10 +116,26 @@
bool IsNV12(const MediaImage2 *img);
/**
+ * Returns true iff a MediaImage2 has a NV21 layout.
+ */
+bool IsNV21(const MediaImage2 *img);
+
+/**
* Returns true iff a MediaImage2 has a I420 layout.
*/
bool IsI420(const MediaImage2 *img);
+enum FlexLayout {
+ FLEX_LAYOUT_UNKNOWN,
+ FLEX_LAYOUT_PLANAR,
+ FLEX_LAYOUT_SEMIPLANAR_UV,
+ FLEX_LAYOUT_SEMIPLANAR_VU,
+};
+/**
+ * Returns layout of YCBCR_420_888 pixel format.
+ */
+FlexLayout GetYuv420FlexibleLayout();
+
/**
* A raw memory block to use for internal buffers.
*
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index 85623b8..a8528df 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -30,10 +30,15 @@
#include <C2ErrnoUtils.h>
#include <C2HandleIonInternal.h>
+#include <android-base/properties.h>
+
namespace android {
namespace {
constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+ // max padding after ion/dmabuf allocations in bytes
+ constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
}
/* size_t <=> int(lo), int(hi) conversions */
@@ -376,14 +381,34 @@
unsigned heapMask, unsigned flags, C2Allocator::id_t id) {
int bufferFd = -1;
ion_user_handle_t buffer = -1;
- size_t alignedSize = align == 0 ? size : (size + align - 1) & ~(align - 1);
+ // NOTE: read this property directly from the property as this code has to run on
+ // Android Q, but the sysprop was only introduced in Android S.
+ static size_t sPadding =
+ base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+ if (sPadding > SIZE_MAX - size) {
+ ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx", size, sPadding);
+ // use ImplV2 as there is no allocation anyways
+ return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+ }
+
+ size_t allocSize = size + sPadding;
+ if (align) {
+ if (align - 1 > SIZE_MAX - allocSize) {
+ ALOGD("ion_alloc: size %#zx cannot accommodate padding %#zx and alignment %#zx",
+ size, sPadding, align);
+ // use ImplV2 as there is no allocation anyways
+ return new ImplV2(ionFd, size, -1, id, -ENOMEM);
+ }
+ allocSize += align - 1;
+ allocSize &= ~(align - 1);
+ }
int ret;
if (ion_is_legacy(ionFd)) {
- ret = ion_alloc(ionFd, alignedSize, align, heapMask, flags, &buffer);
+ ret = ion_alloc(ionFd, allocSize, align, heapMask, flags, &buffer);
ALOGV("ion_alloc(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
"returned (%d) ; buffer = %d",
- ionFd, alignedSize, align, heapMask, flags, ret, buffer);
+ ionFd, allocSize, align, heapMask, flags, ret, buffer);
if (ret == 0) {
// get buffer fd for native handle constructor
ret = ion_share(ionFd, buffer, &bufferFd);
@@ -392,15 +417,15 @@
buffer = -1;
}
}
- return new Impl(ionFd, alignedSize, bufferFd, buffer, id, ret);
+ return new Impl(ionFd, allocSize, bufferFd, buffer, id, ret);
} else {
- ret = ion_alloc_fd(ionFd, alignedSize, align, heapMask, flags, &bufferFd);
+ ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
"returned (%d) ; bufferFd = %d",
- ionFd, alignedSize, align, heapMask, flags, ret, bufferFd);
+ ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
- return new ImplV2(ionFd, alignedSize, bufferFd, id, ret);
+ return new ImplV2(ionFd, allocSize, bufferFd, id, ret);
}
}
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 750aa31..6d8552a 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -16,11 +16,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2DmaBufAllocator"
+
#include <BufferAllocator/BufferAllocator.h>
#include <C2Buffer.h>
#include <C2Debug.h>
#include <C2DmaBufAllocator.h>
#include <C2ErrnoUtils.h>
+
#include <linux/ion.h>
#include <sys/mman.h>
#include <unistd.h> // getpagesize, size_t, close, dup
@@ -28,14 +30,15 @@
#include <list>
-#ifdef __ANDROID_APEX__
#include <android-base/properties.h>
-#endif
namespace android {
namespace {
-constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+ constexpr size_t USAGE_LRU_CACHE_SIZE = 1024;
+
+ // max padding after ion/dmabuf allocations in bytes
+ constexpr uint32_t MAX_PADDING = 0x8000; // 32KB
}
/* =========================== BUFFER HANDLE =========================== */
@@ -250,8 +253,11 @@
int ret = 0;
bufferFd = alloc.Alloc(heap_name, size, flags);
- if (bufferFd < 0) ret = bufferFd;
+ if (bufferFd < 0) {
+ ret = bufferFd;
+ }
+ // this may be a non-working handle if bufferFd is negative
mHandle = C2HandleBuf(bufferFd, size);
mId = id;
mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
@@ -360,8 +366,22 @@
return ret;
}
+ // TODO: should we pad before mapping usage?
+
+ // NOTE: read this property directly from the property as this code has to run on
+ // Android Q, but the sysprop was only introduced in Android S.
+ static size_t sPadding =
+ base::GetUintProperty("media.c2.dmabuf.padding", (uint32_t)0, MAX_PADDING);
+ if (sPadding > SIZE_MAX - capacity) {
+ // size would overflow
+ ALOGD("dmabuf_alloc: size #%x cannot accommodate padding #%zx", capacity, sPadding);
+ return C2_NO_MEMORY;
+ }
+
+ size_t allocSize = (size_t)capacity + sPadding;
+ // TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
- mBufferAllocator, capacity, heap_name, flags, getId());
+ mBufferAllocator, allocSize, heap_name, flags, getId());
ret = alloc->status();
if (ret == C2_OK) {
*allocation = alloc;
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index 409fca1..1bc8c63 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -4505,6 +4505,9 @@
if (objectTypeIndication == 0x6B || objectTypeIndication == 0x69) {
// mp3 audio
+ if (mLastTrack == NULL)
+ return ERROR_MALFORMED;
+
AMediaFormat_setString(mLastTrack->meta,AMEDIAFORMAT_KEY_MIME, MEDIA_MIMETYPE_AUDIO_MPEG);
return OK;
}
@@ -4595,6 +4598,10 @@
if (offset >= csd_size || csd[offset] != 0x01) {
return ERROR_MALFORMED;
}
+
+ if (mLastTrack == NULL) {
+ return ERROR_MALFORMED;
+ }
// formerly kKeyVorbisInfo
AMediaFormat_setBuffer(mLastTrack->meta,
AMEDIAFORMAT_KEY_CSD_0, &csd[offset], len1);
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 19d1d1a..e37cc12 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1249,6 +1249,46 @@
return finalBufferSize;
}
+ssize_t AudioTrack::getStartThresholdInFrames() const
+{
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+ return NO_INIT;
+ }
+ return (ssize_t) mProxy->getStartThresholdInFrames();
+}
+
+ssize_t AudioTrack::setStartThresholdInFrames(size_t startThresholdInFrames)
+{
+ if (startThresholdInFrames > INT32_MAX || startThresholdInFrames == 0) {
+ // contractually we could simply return the current threshold in frames
+ // to indicate the request was ignored, but we return an error here.
+ return BAD_VALUE;
+ }
+ AutoMutex lock(mLock);
+ // We do not permit calling setStartThresholdInFrames() between the AudioTrack
+ // default ctor AudioTrack() and set(...) but rather fail such an attempt.
+ // (To do so would require a cached mOrigStartThresholdInFrames and we may
+ // not have proper validation for the actual set value).
+ if (mOutput == AUDIO_IO_HANDLE_NONE || mProxy.get() == 0) {
+ return NO_INIT;
+ }
+ const uint32_t original = mProxy->getStartThresholdInFrames();
+ const uint32_t final = mProxy->setStartThresholdInFrames(startThresholdInFrames);
+ if (original != final) {
+ android::mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD)
+ .set(AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES, (int32_t)final)
+ .record();
+ if (original > final) {
+ // restart track if it was disabled by audioflinger due to previous underrun
+ // and we reduced the number of frames for the threshold.
+ restartIfDisabled();
+ }
+ }
+ return final;
+}
+
status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
if (mSharedBuffer == 0 || isOffloadedOrDirect()) {
@@ -2562,6 +2602,10 @@
staticPosition = mStaticProxy->getPosition().unsignedValue();
}
+ // save the old startThreshold and framecount
+ const uint32_t originalStartThresholdInFrames = mProxy->getStartThresholdInFrames();
+ const uint32_t originalFrameCount = mProxy->frameCount();
+
// See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
// causes a lot of churn on the service side, and it can reject starting
// playback of a previously created track. May also apply to other cases.
@@ -2616,6 +2660,18 @@
return mAudioTrack->applyVolumeShaper(shaper.mConfiguration, operationToEnd);
});
+ // restore the original start threshold if different than frameCount.
+ if (originalStartThresholdInFrames != originalFrameCount) {
+ // Note: mProxy->setStartThresholdInFrames() call is in the Proxy
+ // and does not trigger a restart.
+ // (Also CBLK_DISABLED is not set, buffers are empty after track recreation).
+ // Any start would be triggered on the mState == ACTIVE check below.
+ const uint32_t currentThreshold =
+ mProxy->setStartThresholdInFrames(originalStartThresholdInFrames);
+ ALOGD_IF(originalStartThresholdInFrames != currentThreshold,
+ "%s(%d) startThresholdInFrames changing from %u to %u",
+ __func__, mPortId, originalStartThresholdInFrames, currentThreshold);
+ }
if (mState == STATE_ACTIVE) {
result = mAudioTrack->start();
}
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index f1f8f9c..35719be 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "AudioTrackShared"
//#define LOG_NDEBUG 0
+#include <atomic>
#include <android-base/macros.h>
#include <private/media/AudioTrackShared.h>
#include <utils/Log.h>
@@ -33,6 +34,21 @@
return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
}
+// compile-time safe atomics. TODO: update all methods to use it
+template <typename T>
+T android_atomic_load(const volatile T* addr) {
+ static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+ static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+ return atomic_load((std::atomic<T>*)addr); // memory_order_seq_cst
+}
+
+template <typename T>
+void android_atomic_store(const volatile T* addr, T value) {
+ static_assert(sizeof(T) == sizeof(std::atomic<T>)); // no extra sync data required.
+ static_assert(std::atomic<T>::is_always_lock_free); // no hash lock somewhere.
+ atomic_store((std::atomic<T>*)addr, value); // memory_order_seq_cst
+}
+
// incrementSequence is used to determine the next sequence value
// for the loop and position sequence counters. It should return
// a value between "other" + 1 and "other" + INT32_MAX, the choice of
@@ -51,6 +67,7 @@
: mServer(0), mFutex(0), mMinimum(0)
, mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0)
, mBufferSizeInFrames(0)
+ , mStartThresholdInFrames(0) // filled in by the server.
, mFlags(0)
{
memset(&u, 0, sizeof(u));
@@ -66,6 +83,26 @@
{
}
+uint32_t Proxy::getStartThresholdInFrames() const
+{
+ const uint32_t startThresholdInFrames =
+ android_atomic_load(&mCblk->mStartThresholdInFrames);
+ if (startThresholdInFrames == 0 || startThresholdInFrames > mFrameCount) {
+ ALOGD("%s: startThresholdInFrames %u not between 1 and frameCount %zu, "
+ "setting to frameCount",
+ __func__, startThresholdInFrames, mFrameCount);
+ return mFrameCount;
+ }
+ return startThresholdInFrames;
+}
+
+uint32_t Proxy::setStartThresholdInFrames(uint32_t startThresholdInFrames)
+{
+ const uint32_t actual = std::min((size_t)startThresholdInFrames, frameCount());
+ android_atomic_store(&mCblk->mStartThresholdInFrames, actual);
+ return actual;
+}
+
// ---------------------------------------------------------------------------
ClientProxy::ClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount,
@@ -663,6 +700,7 @@
, mTimestampMutator(&cblk->mExtendedTimestampQueue)
{
cblk->mBufferSizeInFrames = frameCount;
+ cblk->mStartThresholdInFrames = frameCount;
}
__attribute__((no_sanitize("integer")))
@@ -900,11 +938,8 @@
}
audio_track_cblk_t* cblk = mCblk;
- int32_t flush = cblk->u.mStreaming.mFlush;
- if (flush != mFlush) {
- // FIXME should return an accurate value, but over-estimate is better than under-estimate
- return mFrameCount;
- }
+ flushBufferIfNeeded();
+
const int32_t rear = getRear();
ssize_t filled = audio_utils::safe_sub_overflow(rear, cblk->u.mStreaming.mFront);
// pipe should not already be overfull
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index fac4c83..4afa9c9 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -429,6 +429,19 @@
*/
ssize_t setBufferSizeInFrames(size_t size);
+ /* Returns the start threshold on the buffer for audio streaming
+ * or a negative value if the AudioTrack is not initialized.
+ */
+ ssize_t getStartThresholdInFrames() const;
+
+ /* Sets the start threshold in frames on the buffer for audio streaming.
+ *
+ * May be clamped internally. Returns the actual value set, or a negative
+ * value if the AudioTrack is not initialized or if the input
+ * is zero or greater than INT_MAX.
+ */
+ ssize_t setStartThresholdInFrames(size_t startThresholdInFrames);
+
/* Return the static buffer specified in constructor or set(), or 0 for streaming mode */
sp<IMemory> sharedBuffer() const { return mSharedBuffer; }
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index da16477..ca4f663 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -246,6 +246,10 @@
return status;
}
CoreUtils::AudioInputFlags hidlFlags;
+#if MAJOR_VERSION <= 5
+ // Some flags were specific to framework and must not leak to the HAL.
+ flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
+#endif
if (status_t status = CoreUtils::audioInputFlagsFromHal(flags, &hidlFlags); status != OK) {
return status;
}
@@ -278,10 +282,6 @@
sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
}
#endif
-#if MAJOR_VERSION <= 5
- // Some flags were specific to framework and must not leak to the HAL.
- flags = static_cast<audio_input_flags_t>(flags & ~AUDIO_INPUT_FLAG_DIRECT);
-#endif
Return<void> ret = mDevice->openInputStream(
handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
[&](Result r, const sp<IStreamIn>& result, const AudioConfig& suggestedConfig) {
@@ -354,7 +354,8 @@
return processReturn("releaseAudioPatch", mDevice->releaseAudioPatch(patch));
}
-status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+template <typename HalPort>
+status_t DeviceHalHidl::getAudioPortImpl(HalPort *port) {
if (mDevice == 0) return NO_INIT;
AudioPort hidlPort;
HidlUtils::audioPortFromHal(*port, &hidlPort);
@@ -370,6 +371,30 @@
return processReturn("getAudioPort", ret, retval);
}
+status_t DeviceHalHidl::getAudioPort(struct audio_port *port) {
+ return getAudioPortImpl(port);
+}
+
+status_t DeviceHalHidl::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+ return getAudioPortImpl(port);
+#else
+ struct audio_port audioPort = {};
+ status_t result = NO_ERROR;
+ if (!audio_populate_audio_port(port, &audioPort)) {
+ ALOGE("Failed to populate legacy audio port from audio_port_v7");
+ result = BAD_VALUE;
+ }
+ status_t status = getAudioPort(&audioPort);
+ if (status == NO_ERROR) {
+ audio_populate_audio_port_v7(&audioPort, port);
+ } else {
+ result = status;
+ }
+ return result;
+#endif
+}
+
status_t DeviceHalHidl::setAudioPortConfig(const struct audio_port_config *config) {
if (mDevice == 0) return NO_INIT;
AudioPortConfig hidlConfig;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index d342d4a..2c847cf 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -107,6 +107,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port);
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
@@ -128,6 +131,8 @@
// The destructor automatically closes the device.
virtual ~DeviceHalHidl();
+
+ template <typename HalPort> status_t getAudioPortImpl(HalPort *port);
};
} // namespace CPP_VERSION
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
index 8021d92..af7dc1a 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ b/media/libaudiohal/impl/DeviceHalLocal.cpp
@@ -180,6 +180,22 @@
return mDev->get_audio_port(mDev, port);
}
+status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
+#if MAJOR_VERSION >= 7
+ if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
+ // get_audio_port_v7 is mandatory if legacy HAL support this API version.
+ return mDev->get_audio_port_v7(mDev, port);
+ }
+#endif
+ struct audio_port audioPort = {};
+ audio_populate_audio_port(port, &audioPort);
+ status_t status = getAudioPort(&audioPort);
+ if (status == NO_ERROR) {
+ audio_populate_audio_port_v7(&audioPort, port);
+ }
+ return status;
+}
+
status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
return mDev->set_audio_port_config(mDev, config);
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
index b4eeba5..46b510b 100644
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ b/media/libaudiohal/impl/DeviceHalLocal.h
@@ -100,6 +100,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port);
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port);
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 6da8bbd..f4a4fe1 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -57,8 +57,7 @@
// Note: This assumes channel mask, format, and sample rate do not change after creation.
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
if (/* mStreamPowerLog.isUserDebugOrEngBuild() && */
- StreamHalHidl::getAudioProperties(
- &config.sample_rate, &config.channel_mask, &config.format) == NO_ERROR) {
+ StreamHalHidl::getAudioProperties(&config) == NO_ERROR) {
mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
}
}
@@ -67,14 +66,6 @@
mStream = nullptr;
}
-// Note: this method will be removed
-status_t StreamHalHidl::getSampleRate(uint32_t *rate) {
- audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
- status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
- *rate = config.sample_rate;
- return status;
-}
-
status_t StreamHalHidl::getBufferSize(size_t *size) {
if (!mStream) return NO_INIT;
status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
@@ -84,48 +75,28 @@
return status;
}
-// Note: this method will be removed
-status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
- audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
- status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
- *mask = config.channel_mask;
- return status;
-}
-
-// Note: this method will be removed
-status_t StreamHalHidl::getFormat(audio_format_t *format) {
- audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
- status_t status = getAudioProperties(&config.sample_rate, &config.channel_mask, &config.format);
- *format = config.format;
- return status;
-}
-
-status_t StreamHalHidl::getAudioProperties(
- uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+status_t StreamHalHidl::getAudioProperties(audio_config_base_t *configBase) {
+ *configBase = AUDIO_CONFIG_BASE_INITIALIZER;
if (!mStream) return NO_INIT;
#if MAJOR_VERSION <= 6
Return<void> ret = mStream->getAudioProperties(
[&](uint32_t sr, auto m, auto f) {
- *sampleRate = sr;
- *mask = static_cast<audio_channel_mask_t>(m);
- *format = static_cast<audio_format_t>(f);
+ configBase->sample_rate = sr;
+ configBase->channel_mask = static_cast<audio_channel_mask_t>(m);
+ configBase->format = static_cast<audio_format_t>(f);
});
return processReturn("getAudioProperties", ret);
#else
Result retval;
status_t conversionStatus = BAD_VALUE;
- audio_config_base_t halConfig = AUDIO_CONFIG_BASE_INITIALIZER;
Return<void> ret = mStream->getAudioProperties(
[&](Result r, const AudioConfigBase& config) {
retval = r;
if (retval == Result::OK) {
- conversionStatus = HidlUtils::audioConfigBaseToHal(config, &halConfig);
+ conversionStatus = HidlUtils::audioConfigBaseToHal(config, configBase);
}
});
if (status_t status = processReturn("getAudioProperties", ret, retval); status == NO_ERROR) {
- *sampleRate = halConfig.sample_rate;
- *mask = halConfig.channel_mask;
- *format = halConfig.format;
return conversionStatus;
} else {
return status;
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 72ce60b..d40fa7c 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -48,21 +48,14 @@
class StreamHalHidl : public virtual StreamHalInterface, public ConversionHelperHidl
{
public:
- // Return the sampling rate in Hz - eg. 44100.
- virtual status_t getSampleRate(uint32_t *rate);
-
// Return size of input/output buffer in bytes for this stream - eg. 4800.
virtual status_t getBufferSize(size_t *size);
- // Return the channel mask.
- virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
- // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
- virtual status_t getFormat(audio_format_t *format);
-
- // Convenience method.
- virtual status_t getAudioProperties(
- uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+ // Return the base configuration of the stream:
+ // - channel mask;
+ // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+ // - sampling rate in Hz - eg. 44100.
+ virtual status_t getAudioProperties(audio_config_base_t *configBase);
// Set audio stream parameters.
virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
index e89b288..d0c375e 100644
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ b/media/libaudiohal/impl/StreamHalLocal.cpp
@@ -45,31 +45,15 @@
mDevice.clear();
}
-status_t StreamHalLocal::getSampleRate(uint32_t *rate) {
- *rate = mStream->get_sample_rate(mStream);
- return OK;
-}
-
status_t StreamHalLocal::getBufferSize(size_t *size) {
*size = mStream->get_buffer_size(mStream);
return OK;
}
-status_t StreamHalLocal::getChannelMask(audio_channel_mask_t *mask) {
- *mask = mStream->get_channels(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::getFormat(audio_format_t *format) {
- *format = mStream->get_format(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(
- uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
- *sampleRate = mStream->get_sample_rate(mStream);
- *mask = mStream->get_channels(mStream);
- *format = mStream->get_format(mStream);
+status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
+ configBase->sample_rate = mStream->get_sample_rate(mStream);
+ configBase->channel_mask = mStream->get_channels(mStream);
+ configBase->format = mStream->get_format(mStream);
return OK;
}
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
index e228104..b260495 100644
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ b/media/libaudiohal/impl/StreamHalLocal.h
@@ -28,21 +28,14 @@
class StreamHalLocal : public virtual StreamHalInterface
{
public:
- // Return the sampling rate in Hz - eg. 44100.
- virtual status_t getSampleRate(uint32_t *rate);
-
// Return size of input/output buffer in bytes for this stream - eg. 4800.
virtual status_t getBufferSize(size_t *size);
- // Return the channel mask.
- virtual status_t getChannelMask(audio_channel_mask_t *mask);
-
- // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
- virtual status_t getFormat(audio_format_t *format);
-
- // Convenience method.
- virtual status_t getAudioProperties(
- uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format);
+ // Return the base configuration of the stream:
+ // - channel mask;
+ // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+ // - sampling rate in Hz - eg. 44100.
+ virtual status_t getAudioProperties(audio_config_base_t *configBase);
// Set audio stream parameters.
virtual status_t setParameters(const String8& kvPairs);
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 1e04b21..29ef011 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -106,6 +106,9 @@
// Fills the list of supported attributes for a given audio port.
virtual status_t getAudioPort(struct audio_port *port) = 0;
+ // Fills the list of supported attributes for a given audio port.
+ virtual status_t getAudioPort(struct audio_port_v7 *port) = 0;
+
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index b47f536..2be12fb 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -31,25 +31,27 @@
class StreamHalInterface : public virtual RefBase
{
public:
- // TODO(mnaganov): Remove
- // Return the sampling rate in Hz - eg. 44100.
- virtual status_t getSampleRate(uint32_t *rate) = 0;
-
// Return size of input/output buffer in bytes for this stream - eg. 4800.
virtual status_t getBufferSize(size_t *size) = 0;
- // TODO(mnaganov): Remove
- // Return the channel mask.
- virtual status_t getChannelMask(audio_channel_mask_t *mask) = 0;
+ // Return the base configuration of the stream:
+ // - channel mask;
+ // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
+ // - sampling rate in Hz - eg. 44100.
+ virtual status_t getAudioProperties(audio_config_base_t *configBase) = 0;
- // TODO(mnaganov): Remove
- // Return the audio format - e.g. AUDIO_FORMAT_PCM_16_BIT.
- virtual status_t getFormat(audio_format_t *format) = 0;
-
- // TODO(mnaganov): Change to use audio_config_base_t
// Convenience method.
- virtual status_t getAudioProperties(
- uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) = 0;
+ inline status_t getAudioProperties(
+ uint32_t *sampleRate, audio_channel_mask_t *mask, audio_format_t *format) {
+ audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+ const status_t result = getAudioProperties(&config);
+ if (result == NO_ERROR) {
+ if (sampleRate != nullptr) *sampleRate = config.sample_rate;
+ if (mask != nullptr) *mask = config.channel_mask;
+ if (format != nullptr) *format = config.format;
+ }
+ return result;
+ }
// Set audio stream parameters.
virtual status_t setParameters(const String8& kvPairs) = 0;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
index 20058a1..4eea04f 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.cpp
@@ -23,6 +23,7 @@
#include <system/audio.h>
#include "LVM_Private.h"
+#include "ScalarArithmetic.h"
#include "VectorArithmetic.h"
#include "LVM_Coeffs.h"
@@ -178,6 +179,9 @@
* Apply the filter
*/
pInstance->pTEBiquad->process(pProcessed, pProcessed, NrFrames);
+ for (auto i = 0; i < NrChannels * NrFrames; i++) {
+ pProcessed[i] = LVM_Clamp(pProcessed[i]);
+ }
}
/*
* Volume balance
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index df7ca5a..7571a24 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -53,16 +53,16 @@
flags_arr=(
"-csE"
"-eqE"
- "-tE"
- "-csE -tE -eqE"
+ "-tE -trebleLvl:15"
+ "-csE -tE -trebleLvl:15 -eqE"
"-bE -M"
- "-csE -tE"
- "-csE -eqE" "-tE -eqE"
- "-csE -tE -bE -M -eqE"
- "-tE -eqE -vcBal:96 -M"
- "-tE -eqE -vcBal:-96 -M"
- "-tE -eqE -vcBal:0 -M"
- "-tE -eqE -bE -vcBal:30 -M"
+ "-csE -tE -trebleLvl:15"
+ "-csE -eqE" "-tE -trebleLvl:15 -eqE"
+ "-csE -tE -trebleLvl:15 -bE -M -eqE"
+ "-tE -trebleLvl:15 -eqE -vcBal:96 -M"
+ "-tE -trebleLvl:15 -eqE -vcBal:-96 -M"
+ "-tE -trebleLvl:15 -eqE -vcBal:0 -M"
+ "-tE -trebleLvl:15 -eqE -bE -vcBal:30 -M"
)
fs_arr=(
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index e484a1a..e65228c 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -79,6 +79,7 @@
int bassEffectLevel = 0;
int eqPresetLevel = 0;
int frameLength = 256;
+ int trebleEffectLevel = 0;
LVM_BE_Mode_en bassEnable = LVM_BE_OFF;
LVM_TE_Mode_en trebleEnable = LVM_TE_OFF;
LVM_EQNB_Mode_en eqEnable = LVM_EQNB_OFF;
@@ -303,10 +304,6 @@
params->PSA_Enable = LVM_PSA_OFF;
params->PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM;
- /* TE Control parameters */
- params->TE_OperatingMode = LVM_TE_OFF;
- params->TE_EffectLevel = 0;
-
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, params);
@@ -445,6 +442,7 @@
/* Treble Enhancement parameters */
params->TE_OperatingMode = plvmConfigParams->trebleEnable;
+ params->TE_EffectLevel = plvmConfigParams->trebleEffectLevel;
/* PSA Control parameters */
params->PSA_Enable = LVM_PSA_ON;
@@ -604,6 +602,15 @@
return -1;
}
lvmConfigParams.eqPresetLevel = eqPresetLevel;
+ } else if (!strncmp(argv[i], "-trebleLvl:", 11)) {
+ const int trebleEffectLevel = atoi(argv[i] + 11);
+ if (trebleEffectLevel > LVM_TE_MAX_EFFECTLEVEL ||
+ trebleEffectLevel < LVM_TE_MIN_EFFECTLEVEL) {
+ printf("Error: Unsupported Treble Effect Level : %d\n", trebleEffectLevel);
+ printUsage();
+ return -1;
+ }
+ lvmConfigParams.trebleEffectLevel = trebleEffectLevel;
} else if (!strcmp(argv[i], "-bE")) {
lvmConfigParams.bassEnable = LVM_BE_ON;
} else if (!strcmp(argv[i], "-eqE")) {
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index 87ed8b6..c6e036a 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -18,15 +18,10 @@
],
}
-cc_library {
- name: "libaudiopreprocessing",
+cc_defaults {
+ name: "libaudiopreprocessing-defaults",
vendor: true,
- relative_install_path: "soundfx",
host_supported: true,
- srcs: ["PreProcessing.cpp"],
- local_include_dirs: [
- ".",
- ],
cflags: [
"-Wall",
"-Werror",
@@ -46,7 +41,6 @@
header_libs: [
"libaudioeffects",
"libhardware_headers",
- "libwebrtc_absl_headers",
],
target: {
darwin: {
@@ -54,3 +48,13 @@
},
},
}
+
+cc_library {
+ name: "libaudiopreprocessing",
+ defaults: ["libaudiopreprocessing-defaults"],
+ relative_install_path: "soundfx",
+ srcs: ["PreProcessing.cpp"],
+ header_libs: [
+ "libwebrtc_absl_headers",
+ ],
+}
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 3b0b6d6..19a8b2f 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -105,9 +105,8 @@
webrtc::AudioProcessing* apm; // handle on webRTC audio processing module (APM)
// Audio Processing module builder
webrtc::AudioProcessingBuilder ap_builder;
- size_t apmFrameCount; // buffer size for webRTC process (10 ms)
- uint32_t apmSamplingRate; // webRTC APM sampling rate (8/16 or 32 kHz)
- size_t frameCount; // buffer size before input resampler ( <=> apmFrameCount)
+ // frameCount represents the size of the buffers used for processing, and must represent 10ms.
+ size_t frameCount;
uint32_t samplingRate; // sampling rate at effect process interface
uint32_t inChannelCount; // input channel count
uint32_t outChannelCount; // output channel count
@@ -119,21 +118,12 @@
webrtc::AudioProcessing::Config config;
webrtc::StreamConfig inputConfig; // input stream configuration
webrtc::StreamConfig outputConfig; // output stream configuration
- int16_t* inBuf; // input buffer used when resampling
- size_t inBufSize; // input buffer size in frames
- size_t framesIn; // number of frames in input buffer
- int16_t* outBuf; // output buffer used when resampling
- size_t outBufSize; // output buffer size in frames
- size_t framesOut; // number of frames in output buffer
uint32_t revChannelCount; // number of channels on reverse stream
uint32_t revEnabledMsk; // bit field containing IDs of enabled pre processors
// with reverse channel
uint32_t revProcessedMsk; // bit field containing IDs of pre processors with reverse
// channel already processed in current round
webrtc::StreamConfig revConfig; // reverse stream configuration.
- int16_t* revBuf; // reverse channel input buffer
- size_t revBufSize; // reverse channel input buffer size
- size_t framesRev; // number of frames in reverse channel input buffer
};
#ifdef DUAL_MIC_TEST
@@ -862,9 +852,7 @@
ALOGW("Session_CreateEffect could not get apm engine");
goto error;
}
- session->apmSamplingRate = kPreprocDefaultSr;
- session->apmFrameCount = (kPreprocDefaultSr) / 100;
- session->frameCount = session->apmFrameCount;
+ session->frameCount = kPreprocDefaultSr / 100;
session->samplingRate = kPreprocDefaultSr;
session->inChannelCount = kPreProcDefaultCnl;
session->outChannelCount = kPreProcDefaultCnl;
@@ -879,12 +867,6 @@
session->processedMsk = 0;
session->revEnabledMsk = 0;
session->revProcessedMsk = 0;
- session->inBuf = NULL;
- session->inBufSize = 0;
- session->outBuf = NULL;
- session->outBufSize = 0;
- session->revBuf = NULL;
- session->revBufSize = 0;
}
status = Effect_Create(&session->effects[procId], session, interface);
if (status < 0) {
@@ -908,13 +890,6 @@
if (session->createdMsk == 0) {
delete session->apm;
session->apm = NULL;
- delete session->inBuf;
- session->inBuf = NULL;
- free(session->outBuf);
- session->outBuf = NULL;
- delete session->revBuf;
- session->revBuf = NULL;
-
session->id = 0;
}
@@ -934,24 +909,8 @@
ALOGV("Session_SetConfig sr %d cnl %08x", config->inputCfg.samplingRate,
config->inputCfg.channels);
- // AEC implementation is limited to 16kHz
- if (config->inputCfg.samplingRate >= 32000 && !(session->createdMsk & (1 << PREPROC_AEC))) {
- session->apmSamplingRate = 32000;
- } else if (config->inputCfg.samplingRate >= 16000) {
- session->apmSamplingRate = 16000;
- } else if (config->inputCfg.samplingRate >= 8000) {
- session->apmSamplingRate = 8000;
- }
-
-
session->samplingRate = config->inputCfg.samplingRate;
- session->apmFrameCount = session->apmSamplingRate / 100;
- if (session->samplingRate == session->apmSamplingRate) {
- session->frameCount = session->apmFrameCount;
- } else {
- session->frameCount =
- (session->apmFrameCount * session->samplingRate) / session->apmSamplingRate;
- }
+ session->frameCount = session->samplingRate / 100;
session->inChannelCount = inCnl;
session->outChannelCount = outCnl;
session->inputConfig.set_sample_rate_hz(session->samplingRate);
@@ -963,13 +922,6 @@
session->revConfig.set_sample_rate_hz(session->samplingRate);
session->revConfig.set_num_channels(inCnl);
- // force process buffer reallocation
- session->inBufSize = 0;
- session->outBufSize = 0;
- session->framesIn = 0;
- session->framesOut = 0;
-
-
session->state = PREPROC_SESSION_STATE_CONFIG;
return 0;
}
@@ -1004,9 +956,6 @@
}
uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
session->revChannelCount = inCnl;
- // force process buffer reallocation
- session->revBufSize = 0;
- session->framesRev = 0;
return 0;
}
@@ -1023,12 +972,8 @@
void Session_SetProcEnabled(preproc_session_t* session, uint32_t procId, bool enabled) {
if (enabled) {
- if (session->enabledMsk == 0) {
- session->framesIn = 0;
- }
session->enabledMsk |= (1 << procId);
if (HasReverseStream(procId)) {
- session->framesRev = 0;
session->revEnabledMsk |= (1 << procId);
}
} else {
@@ -1117,43 +1062,24 @@
return -EINVAL;
}
+ if (inBuffer->frameCount != outBuffer->frameCount) {
+ ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+ inBuffer->frameCount, outBuffer->frameCount);
+ return -EINVAL;
+ }
+
+ if (inBuffer->frameCount != session->frameCount) {
+ ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+ inBuffer->frameCount, session->frameCount, session->samplingRate);
+ return -EINVAL;
+ }
+
session->processedMsk |= (1 << effect->procId);
// ALOGV("PreProcessingFx_Process In %d frames enabledMsk %08x processedMsk %08x",
// inBuffer->frameCount, session->enabledMsk, session->processedMsk);
-
if ((session->processedMsk & session->enabledMsk) == session->enabledMsk) {
effect->session->processedMsk = 0;
- size_t framesRq = outBuffer->frameCount;
- size_t framesWr = 0;
- if (session->framesOut) {
- size_t fr = session->framesOut;
- if (outBuffer->frameCount < fr) {
- fr = outBuffer->frameCount;
- }
- memcpy(outBuffer->s16, session->outBuf,
- fr * session->outChannelCount * sizeof(int16_t));
- memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
- (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
- session->framesOut -= fr;
- framesWr += fr;
- }
- outBuffer->frameCount = framesWr;
- if (framesWr == framesRq) {
- inBuffer->frameCount = 0;
- return 0;
- }
-
- size_t fr = session->frameCount - session->framesIn;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- session->framesIn += fr;
- inBuffer->frameCount = fr;
- if (session->framesIn < session->frameCount) {
- return 0;
- }
- session->framesIn = 0;
if (int status = effect->session->apm->ProcessStream(
(const int16_t* const)inBuffer->s16,
(const webrtc::StreamConfig)effect->session->inputConfig,
@@ -1163,34 +1089,6 @@
ALOGE("Process Stream failed with error %d\n", status);
return status;
}
- outBuffer->frameCount = inBuffer->frameCount;
-
- if (session->outBufSize < session->framesOut + session->frameCount) {
- int16_t* buf;
- session->outBufSize = session->framesOut + session->frameCount;
- buf = (int16_t*)realloc(
- session->outBuf,
- session->outBufSize * session->outChannelCount * sizeof(int16_t));
- if (buf == NULL) {
- session->framesOut = 0;
- free(session->outBuf);
- session->outBuf = NULL;
- return -ENOMEM;
- }
- session->outBuf = buf;
- }
-
- fr = session->framesOut;
- if (framesRq - framesWr < fr) {
- fr = framesRq - framesWr;
- }
- memcpy(outBuffer->s16 + framesWr * session->outChannelCount, session->outBuf,
- fr * session->outChannelCount * sizeof(int16_t));
- memmove(session->outBuf, session->outBuf + fr * session->outChannelCount,
- (session->framesOut - fr) * session->outChannelCount * sizeof(int16_t));
- session->framesOut -= fr;
- outBuffer->frameCount += fr;
-
return 0;
} else {
return -ENODATA;
@@ -1565,6 +1463,18 @@
return -EINVAL;
}
+ if (inBuffer->frameCount != outBuffer->frameCount) {
+ ALOGW("inBuffer->frameCount %zu is not equal to outBuffer->frameCount %zu",
+ inBuffer->frameCount, outBuffer->frameCount);
+ return -EINVAL;
+ }
+
+ if (inBuffer->frameCount != session->frameCount) {
+ ALOGW("inBuffer->frameCount %zu != %zu representing 10ms at sampling rate %d",
+ inBuffer->frameCount, session->frameCount, session->samplingRate);
+ return -EINVAL;
+ }
+
session->revProcessedMsk |= (1 << effect->procId);
// ALOGV("PreProcessingFx_ProcessReverse In %d frames revEnabledMsk %08x revProcessedMsk
@@ -1573,16 +1483,6 @@
if ((session->revProcessedMsk & session->revEnabledMsk) == session->revEnabledMsk) {
effect->session->revProcessedMsk = 0;
- size_t fr = session->frameCount - session->framesRev;
- if (inBuffer->frameCount < fr) {
- fr = inBuffer->frameCount;
- }
- session->framesRev += fr;
- inBuffer->frameCount = fr;
- if (session->framesRev < session->frameCount) {
- return 0;
- }
- session->framesRev = 0;
if (int status = effect->session->apm->ProcessReverseStream(
(const int16_t* const)inBuffer->s16,
(const webrtc::StreamConfig)effect->session->revConfig,
diff --git a/media/libeffects/preprocessing/README.md b/media/libeffects/preprocessing/README.md
new file mode 100644
index 0000000..af46376
--- /dev/null
+++ b/media/libeffects/preprocessing/README.md
@@ -0,0 +1,7 @@
+# Preprocessing effects
+
+## Limitations
+- Preprocessing effects currently work on 10ms worth of data and do not support
+ arbitrary frame counts. This limitation comes from the underlying effects in
+ webrtc modules.
+- There is currently no API to communicate this requirement.
diff --git a/media/libeffects/preprocessing/benchmarks/Android.bp b/media/libeffects/preprocessing/benchmarks/Android.bp
index c1b2295..fbbcab4 100644
--- a/media/libeffects/preprocessing/benchmarks/Android.bp
+++ b/media/libeffects/preprocessing/benchmarks/Android.bp
@@ -11,27 +11,10 @@
cc_benchmark {
name: "preprocessing_benchmark",
- vendor: true,
+ defaults: ["libaudiopreprocessing-defaults"],
srcs: ["preprocessing_benchmark.cpp"],
- shared_libs: [
- "libaudioutils",
- "liblog",
- "libutils",
- ],
static_libs: [
"libaudiopreprocessing",
- "webrtc_audio_processing",
- ],
- cflags: [
- "-DWEBRTC_POSIX",
- "-fvisibility=default",
- "-Wall",
- "-Werror",
- "-Wextra",
- ],
- header_libs: [
- "libaudioeffects",
- "libhardware_headers",
- "libwebrtc_absl_headers",
+ "libaudioutils",
],
}
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index 18c6c98..d80b135 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -12,9 +12,8 @@
cc_test {
name: "EffectPreprocessingTest",
- vendor: true,
+ defaults: ["libaudiopreprocessing-defaults"],
gtest: true,
- host_supported: true,
test_suites: ["device-tests"],
srcs: [
"EffectPreprocessingTest.cpp",
@@ -23,46 +22,18 @@
static_libs: [
"libaudiopreprocessing",
"libaudioutils",
- "webrtc_audio_processing",
],
- shared_libs: [
- "liblog",
- ],
- header_libs: [
- "libaudioeffects",
- "libhardware_headers",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
}
cc_test {
name: "AudioPreProcessingTest",
- vendor: true,
- host_supported: true,
+ defaults: ["libaudiopreprocessing-defaults"],
gtest: false,
srcs: ["PreProcessingTest.cpp"],
- shared_libs: [
- "libaudioutils",
- "liblog",
- "libutils",
- ],
static_libs: [
"libaudiopreprocessing",
- "webrtc_audio_processing",
+ "libaudioutils",
],
- header_libs: [
- "libaudioeffects",
- "libhardware_headers",
- ],
- target: {
- darwin: {
- enabled: false,
- },
- },
}
cc_test {
diff --git a/media/libeffects/preprocessing/tests/EffectTestHelper.h b/media/libeffects/preprocessing/tests/EffectTestHelper.h
index db06823..117cf7b 100644
--- a/media/libeffects/preprocessing/tests/EffectTestHelper.h
+++ b/media/libeffects/preprocessing/tests/EffectTestHelper.h
@@ -88,7 +88,8 @@
static constexpr size_t kNumChMasks = std::size(kChMasks);
- static constexpr size_t kSampleRates[] = {8000, 16000, 24000, 32000, 48000};
+ static constexpr size_t kSampleRates[] = {8000, 11025, 12000, 16000, 22050,
+ 24000, 32000, 44100, 48000};
static constexpr size_t kNumSampleRates = std::size(kSampleRates);
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index e0025fe..3bd93f8 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -451,8 +451,8 @@
}
audio_buffer_t inputBuffer, outputBuffer;
audio_buffer_t farInBuffer{};
- inputBuffer.frameCount = samplesRead;
- outputBuffer.frameCount = samplesRead;
+ inputBuffer.frameCount = frameLength;
+ outputBuffer.frameCount = frameLength;
inputBuffer.s16 = in.data();
outputBuffer.s16 = out.data();
@@ -472,7 +472,7 @@
}
}
- farInBuffer.frameCount = samplesRead;
+ farInBuffer.frameCount = frameLength;
farInBuffer.s16 = farIn.data();
}
@@ -519,6 +519,7 @@
}
frameCounter += frameLength;
}
+ printf("frameCounter: [%d]\n", frameCounter);
// Release all the effect handles created
for (int i = 0; i < PREPROC_NUM_EFFECTS; i++) {
if (int status = AUDIO_EFFECT_LIBRARY_INFO_SYM.release_effect(effectHandle[i]);
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
index 942f2ec..35da13e 100755
--- a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -59,9 +59,13 @@
fs_arr=(
8000
+ 11025
+ 12000
16000
+ 22050
24000
32000
+ 44100
48000
)
diff --git a/media/libeffects/proxy/EffectProxy.cpp b/media/libeffects/proxy/EffectProxy.cpp
index c010d68..be9f8c0 100644
--- a/media/libeffects/proxy/EffectProxy.cpp
+++ b/media/libeffects/proxy/EffectProxy.cpp
@@ -116,6 +116,16 @@
pContext->sube[SUB_FX_OFFLOAD] = sube[1];
pContext->desc[SUB_FX_OFFLOAD] = desc[1];
pContext->aeli[SUB_FX_OFFLOAD] = aeli[1];
+ } else {
+ ALOGE("Both effects have (or don't have) EFFECT_FLAG_HW_ACC_TUNNEL flag");
+ delete[] sube;
+ delete[] desc;
+ delete[] aeli;
+ delete[] pContext->sube;
+ delete[] pContext->desc;
+ delete[] pContext->aeli;
+ delete pContext;
+ return -EINVAL;
}
delete[] desc;
delete[] aeli;
diff --git a/media/libmedia/tests/fuzzer/Android.bp b/media/libmedia/tests/fuzzer/Android.bp
new file mode 100644
index 0000000..c03b5b1
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/Android.bp
@@ -0,0 +1,19 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libmedia_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_media_libmedia_license"],
+}
+
+cc_fuzz {
+ name: "libmedia_metadata_fuzzer",
+ srcs: [
+ "libmedia_metadata_fuzzer.cpp",
+ ],
+ shared_libs: [
+ "libmedia",
+ "libbinder",
+ ],
+}
diff --git a/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
new file mode 100644
index 0000000..058e4e5
--- /dev/null
+++ b/media/libmedia/tests/fuzzer/libmedia_metadata_fuzzer.cpp
@@ -0,0 +1,52 @@
+//This program fuzzes Metadata.cpp
+
+#include <stddef.h>
+#include <stdint.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <media/Metadata.h>
+#include <binder/Parcel.h>
+
+using namespace android;
+using namespace media;
+
+static const float want_prob = 0.5;
+
+bool bytesRemain(FuzzedDataProvider *fdp);
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ FuzzedDataProvider fdp(data, size);
+ Parcel p;
+ Metadata md = Metadata(&p);
+
+ md.appendHeader();
+ while (bytesRemain(&fdp)) {
+
+ float got_prob = fdp.ConsumeProbability<float>();
+ if (!bytesRemain(&fdp)) {
+ break;
+ }
+
+ if (got_prob < want_prob) {
+ int32_t key_bool = fdp.ConsumeIntegral<int32_t>();
+ if (!bytesRemain(&fdp)) {
+ break;
+ }
+ bool val_bool = fdp.ConsumeBool();
+ md.appendBool(key_bool, val_bool);
+ } else {
+ int32_t key_int32 = fdp.ConsumeIntegral<int32_t>();
+ if (!bytesRemain(&fdp)) {
+ break;
+ }
+ bool val_int32 = fdp.ConsumeIntegral<int32_t>();
+ md.appendInt32(key_int32, val_int32);
+ }
+ md.updateLength();
+ }
+ md.resetParcel();
+ return 0;
+}
+
+bool bytesRemain(FuzzedDataProvider *fdp){
+ return fdp -> remaining_bytes() > 0;
+}
\ No newline at end of file
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 84388c9..674df17 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -139,6 +139,7 @@
#define AMEDIAMETRICS_PROP_SESSIONID "sessionId" // int32
#define AMEDIAMETRICS_PROP_SHARINGMODE "sharingMode" // string value, "exclusive", shared"
#define AMEDIAMETRICS_PROP_SOURCE "source" // string (AudioAttributes)
+#define AMEDIAMETRICS_PROP_STARTTHRESHOLDFRAMES "startThresholdFrames" // int32 (AudioTrack)
#define AMEDIAMETRICS_PROP_STARTUPMS "startupMs" // double value
// State is "ACTIVE" or "STOPPED" for AudioRecord
#define AMEDIAMETRICS_PROP_STATE "state" // string
@@ -181,6 +182,7 @@
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE "setMode" // AudioFlinger
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETBUFFERSIZE "setBufferSize" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME "setVoiceVolume" // AudioFlinger
#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME "setVolume" // AudioTrack
#define AMEDIAMETRICS_PROP_EVENT_VALUE_START "start" // AudioTrack, AudioRecord
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 9ab117a..c5d3c1d 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -35,6 +35,7 @@
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
"libbase",
+ "libactivitymanager_aidl",
"libandroid_net",
"libaudioclient",
"libbinder",
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 1054b68..ca98b28 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -46,13 +46,11 @@
status_t result;
result = mStream->getBufferSize(&mStreamBufferSizeBytes);
if (result != OK) return result;
- audio_format_t streamFormat;
- uint32_t sampleRate;
- audio_channel_mask_t channelMask;
- result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+ audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+ result = mStream->getAudioProperties(&config);
if (result != OK) return result;
- mFormat = Format_from_SR_C(sampleRate,
- audio_channel_count_from_in_mask(channelMask), streamFormat);
+ mFormat = Format_from_SR_C(config.sample_rate,
+ audio_channel_count_from_in_mask(config.channel_mask), config.format);
mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index 8564899..581867f 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -44,13 +44,11 @@
status_t result;
result = mStream->getBufferSize(&mStreamBufferSizeBytes);
if (result != OK) return result;
- audio_format_t streamFormat;
- uint32_t sampleRate;
- audio_channel_mask_t channelMask;
- result = mStream->getAudioProperties(&sampleRate, &channelMask, &streamFormat);
+ audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+ result = mStream->getAudioProperties(&config);
if (result != OK) return result;
- mFormat = Format_from_SR_C(sampleRate,
- audio_channel_count_from_out_mask(channelMask), streamFormat);
+ mFormat = Format_from_SR_C(config.sample_rate,
+ audio_channel_count_from_out_mask(config.channel_mask), config.format);
mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 71a4ad8..224ec8b 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -5683,15 +5683,18 @@
int32_t range, standard, transfer;
convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);
+ int32_t dsRange, dsStandard, dsTransfer;
+ getColorConfigFromDataSpace(dataSpace, &dsRange, &dsStandard, &dsTransfer);
+
// if some aspects are unspecified, use dataspace fields
if (range == 0) {
- range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+ range = dsRange;
}
if (standard == 0) {
- standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+ standard = dsStandard;
}
if (transfer == 0) {
- transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+ transfer = dsTransfer;
}
mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 4406efd..8e721d4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1981,6 +1981,22 @@
return OK;
}
+status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
+ return mCodec->querySupportedParameters(names);
+}
+
+status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
+ return mCodec->describeParameter(name, desc);
+}
+
+status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
+ return mCodec->subscribeToParameters(names);
+}
+
+status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
+ return mCodec->unsubscribeFromParameters(names);
+}
+
void MediaCodec::requestActivityNotification(const sp<AMessage> ¬ify) {
sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
msg->setMessage("notify", notify);
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 0f7df24..6c4addf 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -715,6 +715,9 @@
status_t MediaCodecSource::feedEncoderInputBuffers() {
MediaBufferBase* mbuf = NULL;
while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
+ if (!mEncoder) {
+ return BAD_VALUE;
+ }
size_t bufferIndex = *mAvailEncoderInputIndices.begin();
mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
@@ -775,12 +778,9 @@
android_dataspace dataspace = static_cast<android_dataspace>(ds);
ColorUtils::convertDataSpaceToV0(dataspace);
ALOGD("Updating dataspace to %x", dataspace);
- int32_t standard = (int32_t(dataspace) & HAL_DATASPACE_STANDARD_MASK)
- >> HAL_DATASPACE_STANDARD_SHIFT;
- int32_t transfer = (int32_t(dataspace) & HAL_DATASPACE_TRANSFER_MASK)
- >> HAL_DATASPACE_TRANSFER_SHIFT;
- int32_t range = (int32_t(dataspace) & HAL_DATASPACE_RANGE_MASK)
- >> HAL_DATASPACE_RANGE_SHIFT;
+ int32_t standard, transfer, range;
+ ColorUtils::getColorConfigFromDataSpace(
+ dataspace, &range, &standard, &transfer);
sp<AMessage> msg = new AMessage;
msg->setInt32(KEY_COLOR_STANDARD, standard);
msg->setInt32(KEY_COLOR_TRANSFER, transfer);
@@ -1151,7 +1151,7 @@
if (mFlags & FLAG_USE_SURFACE_INPUT) {
sp<AMessage> params = new AMessage;
params->setInt64(PARAMETER_KEY_OFFSET_TIME, mInputBufferTimeOffsetUs);
- err = mEncoder->setParameters(params);
+ err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
}
sp<AMessage> response = new AMessage;
@@ -1171,7 +1171,7 @@
if (mFlags & FLAG_USE_SURFACE_INPUT) {
sp<AMessage> params = new AMessage;
params->setInt64("stop-time-us", stopTimeUs);
- err = mEncoder->setParameters(params);
+ err = mEncoder ? mEncoder->setParameters(params) : BAD_VALUE;
} else {
err = mPuller->setStopTimeUs(stopTimeUs);
}
diff --git a/media/libstagefright/OWNERS b/media/libstagefright/OWNERS
index 819389d..0cc2294 100644
--- a/media/libstagefright/OWNERS
+++ b/media/libstagefright/OWNERS
@@ -4,4 +4,8 @@
lajos@google.com
marcone@google.com
taklee@google.com
-wonsik@google.com
\ No newline at end of file
+wonsik@google.com
+
+# LON
+olly@google.com
+andrewlewis@google.com
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 070e325..3812afe 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -613,6 +613,35 @@
}
// static
+void ColorUtils::getColorConfigFromDataSpace(
+ const android_dataspace &dataspace, int32_t *range, int32_t *standard, int32_t *transfer) {
+ uint32_t gfxRange =
+ (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+ uint32_t gfxStandard =
+ (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+ uint32_t gfxTransfer =
+ (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+
+ // assume 1-to-1 mapping to HAL values (to deal with potential vendor extensions)
+ CU::ColorRange cuRange = CU::kColorRangeUnspecified;
+ CU::ColorStandard cuStandard = CU::kColorStandardUnspecified;
+ CU::ColorTransfer cuTransfer = CU::kColorTransferUnspecified;
+ // TRICKY: use & to ensure all three mappings are completed
+ if (!(sGfxRanges.map(gfxRange, &cuRange) & sGfxStandards.map(gfxStandard, &cuStandard)
+ & sGfxTransfers.map(gfxTransfer, &cuTransfer))) {
+ ALOGW("could not safely map graphics dataspace (R:%u S:%u T:%u) to "
+ "platform color aspects (R:%u(%s) S:%u(%s) T:%u(%s)",
+ gfxRange, gfxStandard, gfxTransfer,
+ cuRange, asString(cuRange),
+ cuStandard, asString(cuStandard),
+ cuTransfer, asString(cuTransfer));
+ }
+ *range = cuRange;
+ *standard = cuStandard;
+ *transfer = cuTransfer;
+}
+
+// static
void ColorUtils::getColorConfigFromFormat(
const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
if (!format->findInt32("color-range", range)) {
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index cd0af2b..9e3f718 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -156,6 +156,10 @@
// suited to blending. This requires implicit color space conversion on part of the device.
static android_dataspace getDataSpaceForColorAspects(ColorAspects &aspects, bool mayExpand);
+ // Returns the platform color config (range, standard, transfer) derived from the given |dataspace|.
+ static void getColorConfigFromDataSpace(
+ const android_dataspace &dataspace, int *range, int *standard, int *transfer);
+
// converts |dataSpace| to a V0 enum, and returns true if dataSpace is an aspect-only value
static bool convertDataSpaceToV0(android_dataspace &dataSpace);
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index dd6df90..2e98fec 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -61,6 +61,11 @@
using hardware::cas::native::V1_0::IDescrambler;
+struct CodecParameterDescriptor {
+ std::string name;
+ AMessage::Type type;
+};
+
struct CodecBase : public AHandler, /* static */ ColorUtils {
/**
* This interface defines events firing from CodecBase back to MediaCodec.
@@ -233,6 +238,64 @@
virtual void signalSetParameters(const sp<AMessage> &msg) = 0;
virtual void signalEndOfInputStream() = 0;
+ /**
+ * Query supported parameters from this instance, and fill |names| with the
+ * names of the parameters.
+ *
+ * \param names string vector to fill with supported parameters.
+ * \return OK if successful;
+ * BAD_VALUE if |names| is null;
+ * INVALID_OPERATION if already released;
+ * ERROR_UNSUPPORTED if not supported.
+ */
+ virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
+ return ERROR_UNSUPPORTED;
+ }
+ /**
+ * Fill |desc| with description of the parameter with |name|.
+ *
+ * \param name name of the parameter to describe
+ * \param desc pointer to CodecParameterDescriptor to be filled
+ * \return OK if successful;
+ * BAD_VALUE if |desc| is null;
+ * NAME_NOT_FOUND if |name| is not recognized by the component;
+ * INVALID_OPERATION if already released;
+ * ERROR_UNSUPPORTED if not supported.
+ */
+ virtual status_t describeParameter(
+ [[maybe_unused]] const std::string &name,
+ [[maybe_unused]] CodecParameterDescriptor *desc) {
+ return ERROR_UNSUPPORTED;
+ }
+ /**
+ * Subscribe to parameters in |names| and get output format change event
+ * when they change.
+ * Unrecognized / already subscribed parameters are ignored.
+ *
+ * \param names names of parameters to subscribe
+ * \return OK if successful;
+ * INVALID_OPERATION if already released;
+ * ERROR_UNSUPPORTED if not supported.
+ */
+ virtual status_t subscribeToParameters(
+ [[maybe_unused]] const std::vector<std::string> &names) {
+ return ERROR_UNSUPPORTED;
+ }
+ /**
+ * Unsubscribe from parameters in |names| and no longer get
+ * output format change event when they change.
+ * Unrecognized / already unsubscribed parameters are ignored.
+ *
+ * \param names names of parameters to unsubscribe
+ * \return OK if successful;
+ * INVALID_OPERATION if already released;
+ * ERROR_UNSUPPORTED if not supported.
+ */
+ virtual status_t unsubscribeFromParameters(
+ [[maybe_unused]] const std::vector<std::string> &names) {
+ return ERROR_UNSUPPORTED;
+ }
+
typedef CodecBase *(*CreateCodecFunc)(void);
typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index a28d479..8952376 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -50,6 +50,7 @@
struct BatteryChecker;
class BufferChannelBase;
struct CodecBase;
+struct CodecParameterDescriptor;
class IBatteryStats;
struct ICrypto;
class MediaCodecBuffer;
@@ -246,6 +247,11 @@
status_t setParameters(const sp<AMessage> ¶ms);
+ status_t querySupportedVendorParameters(std::vector<std::string> *names);
+ status_t describeParameter(const std::string &name, CodecParameterDescriptor *desc);
+ status_t subscribeToVendorParameters(const std::vector<std::string> &names);
+ status_t unsubscribeFromVendorParameters(const std::vector<std::string> &names);
+
// Create a MediaCodec notification message from a list of rendered or dropped render infos
// by adding rendered frame information to a base notification message. Returns the number
// of frames that were rendered.
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 9793b89..bee96b1 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -752,7 +752,7 @@
constexpr char KEY_CA_SYSTEM_ID[] = "ca-system-id";
constexpr char KEY_CA_PRIVATE_DATA[] = "ca-private-data";
constexpr char KEY_CAPTURE_RATE[] = "capture-rate";
-constexpr char KEY_CHANNEL_COUNT[] = "channel-count";
+constexpr char KEY_CHANNEL_COUNT[] = "channel-count"; // value N, eq to range 1..N
constexpr char KEY_CHANNEL_MASK[] = "channel-mask";
constexpr char KEY_COLOR_FORMAT[] = "color-format";
constexpr char KEY_COLOR_RANGE[] = "color-range";
@@ -807,6 +807,14 @@
constexpr char KEY_TILE_HEIGHT[] = "tile-height";
constexpr char KEY_TILE_WIDTH[] = "tile-width";
constexpr char KEY_TRACK_ID[] = "track-id";
+constexpr char KEY_VIDEO_QP_B_MAX[] = "video-qp-b-max";
+constexpr char KEY_VIDEO_QP_B_MIN[] = "video-qp-b-min";
+constexpr char KEY_VIDEO_QP_I_MAX[] = "video-qp-i-max";
+constexpr char KEY_VIDEO_QP_I_MIN[] = "video-qp-i-min";
+constexpr char KEY_VIDEO_QP_MAX[] = "video-qp-max";
+constexpr char KEY_VIDEO_QP_MIN[] = "video-qp-min";
+constexpr char KEY_VIDEO_QP_P_MAX[] = "video-qp-p-max";
+constexpr char KEY_VIDEO_QP_P_MIN[] = "video-qp-p-min";
constexpr char KEY_WIDTH[] = "width";
// from MediaCodec.java
diff --git a/media/tests/SampleVideoEncoder/README.md b/media/tests/SampleVideoEncoder/README.md
index 074c939..2e275c5 100644
--- a/media/tests/SampleVideoEncoder/README.md
+++ b/media/tests/SampleVideoEncoder/README.md
@@ -2,7 +2,7 @@
This is a sample android application for encoding AVC/HEVC streams with B-Frames enabled. It uses MediaRecorder APIs to record B-frames enabled video from camera2 input and MediaCodec APIs to encode reference test vector using input surface.
-This page describes how to get started with the Encoder App.
+This page describes how to get started with the Encoder App and how to run the tests for it.
# Getting Started
@@ -33,6 +33,17 @@
After installing the app, a TextureView showing camera preview is dispalyed on one third of the screen. It also features checkboxes to select either avc/hevc and hw/sw codecs. It also has an option to select either MediaRecorder APIs or MediaCodec, along with the 'Start' button to start/stop recording.
+# Running Tests
+
+The app also contains a test, which will test the MediaCodec APIs for encoding avc/hevc streams with B-frames enabled. This does not require the use of the application UI.
+
+## Running the tests using atest
+Note that atest command will install the SampleVideoEncoder app on the device.
+
+Command to run the tests:
+```
+atest SampleVideoEncoder
+```
# Ouput
@@ -40,3 +51,6 @@
```
/storage/emulated/0/Android/data/com.android.media.samplevideoencoder/files/
```
+
+The total number of I-frames, P-frames, and B-frames after encoding using MediaCodec APIs is displayed on the screen.
+The results of the tests can be obtained from the logcats of the test.
diff --git a/media/tests/SampleVideoEncoder/app/Android.bp b/media/tests/SampleVideoEncoder/app/Android.bp
index 3a66955..58b219b 100644
--- a/media/tests/SampleVideoEncoder/app/Android.bp
+++ b/media/tests/SampleVideoEncoder/app/Android.bp
@@ -23,7 +23,7 @@
default_applicable_licenses: ["frameworks_av_license"],
}
-android_app {
+android_test {
name: "SampleVideoEncoder",
manifest: "src/main/AndroidManifest.xml",
@@ -41,6 +41,10 @@
"androidx.annotation_annotation",
"androidx.appcompat_appcompat",
"androidx-constraintlayout_constraintlayout",
+ "junit",
+ "androidx.test.core",
+ "androidx.test.runner",
+ "hamcrest-library",
],
javacflags: [
diff --git a/media/tests/SampleVideoEncoder/app/AndroidTest.xml b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
new file mode 100644
index 0000000..91f4304
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/AndroidTest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs SampleVideoEncoder Tests">
+ <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
+ <option name="cleanup-apks" value="false" />
+ <option name="test-file-name" value="SampleVideoEncoder.apk" />
+ </target_preparer>
+
+ <option name="test-tag" value="SampleVideoEncoder" />
+ <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+ <option name="package" value="com.android.media.samplevideoencoder" />
+ <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
+ <option name="hidden-api-checks" value="false"/>
+ </test>
+</configuration>
diff --git a/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
new file mode 100644
index 0000000..1ef332e
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/androidTest/java/com/android/media/samplevideoencoder/tests/SampleVideoEncoderTest.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.samplevideoencoder.tests;
+
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import android.content.Context;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import com.android.media.samplevideoencoder.MediaCodecSurfaceEncoder;
+import com.android.media.samplevideoencoder.R;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertThat;
+
+@RunWith(Parameterized.class)
+public class SampleVideoEncoderTest {
+ private static final String TAG = SampleVideoEncoderTest.class.getSimpleName();
+ private final Context mContext;
+ private int mMaxBFrames;
+ private int mInputResId;
+ private String mMime;
+ private boolean mIsSoftwareEncoder;
+
+ @Parameterized.Parameters
+ public static Collection<Object[]> inputFiles() {
+ return Arrays.asList(new Object[][]{
+ // Parameters: MimeType, isSoftwareEncoder, maxBFrames
+ {MediaFormat.MIMETYPE_VIDEO_AVC, false, 1},
+ {MediaFormat.MIMETYPE_VIDEO_AVC, true, 1},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, false, 1},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, true, 1}});
+ }
+
+ public SampleVideoEncoderTest(String mimeType, boolean isSoftwareEncoder, int maxBFrames) {
+ this.mContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
+ this.mInputResId = R.raw.crowd_1920x1080_25fps_4000kbps_h265;
+ this.mMime = mimeType;
+ this.mIsSoftwareEncoder = isSoftwareEncoder;
+ this.mMaxBFrames = maxBFrames;
+ }
+
+ private String getOutputPath() {
+ File dir = mContext.getExternalFilesDir(null);
+ if (dir == null) {
+ Log.e(TAG, "Cannot get external directory path to save output video");
+ return null;
+ }
+ String videoPath = dir.getAbsolutePath() + "/Video-" + System.currentTimeMillis() + ".mp4";
+ Log.i(TAG, "Output video is saved at: " + videoPath);
+ return videoPath;
+ }
+
+ @Test
+ public void testMediaSurfaceEncoder() throws IOException, InterruptedException {
+ String outputFilePath = getOutputPath();
+ MediaCodecSurfaceEncoder surfaceEncoder =
+ new MediaCodecSurfaceEncoder(mContext, mInputResId, mMime, mIsSoftwareEncoder,
+ outputFilePath, mMaxBFrames);
+ int encodingStatus = surfaceEncoder.startEncodingSurface();
+ assertThat(encodingStatus, is(equalTo(0)));
+ int[] frameNumArray = surfaceEncoder.getFrameTypes();
+ Log.i(TAG, "Results: I-Frames: " + frameNumArray[0] + "; P-Frames: " + frameNumArray[1] +
+ "\n " + "; B-Frames:" + frameNumArray[2]);
+ assertNotEquals("Encoder mime: " + mMime + " isSoftware: " + mIsSoftwareEncoder +
+ " failed to generate B Frames", frameNumArray[2], 0);
+ }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
index ed668bb..b17541d 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
+++ b/media/tests/SampleVideoEncoder/app/src/main/AndroidManifest.xml
@@ -38,4 +38,8 @@
</activity>
</application>
+ <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+ android:targetPackage="com.android.media.samplevideoencoder"
+ android:label="SampleVideoEncoder Test"/>
+
</manifest>
\ No newline at end of file
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
index 33e81bb..a7a353c 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java
@@ -56,6 +56,7 @@
import android.util.Log;
import android.util.Size;
import android.widget.RadioGroup;
+import android.widget.TextView;
import android.widget.Toast;
import java.lang.ref.WeakReference;
@@ -80,6 +81,14 @@
private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */;
private static final int VIDEO_FRAMERATE = 30;
+ /**
+ * Constant values assigned to the frame types here are internal to this app.
+ * These values do not correspond to the actual values defined in the avc/hevc specifications.
+ */
+ public static final int FRAME_TYPE_I = 0;
+ public static final int FRAME_TYPE_P = 1;
+ public static final int FRAME_TYPE_B = 2;
+
private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC;
private String mOutputVideoPath = null;
@@ -89,6 +98,7 @@
private boolean mIsRecording;
private AutoFitTextureView mTextureView;
+ private TextView mTextView;
private CameraDevice mCameraDevice;
private CameraCaptureSession mPreviewSession;
private CaptureRequest.Builder mPreviewBuilder;
@@ -101,6 +111,8 @@
private Button mStartButton;
+ private int[] mFrameTypeOccurrences;
+
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
@@ -129,6 +141,8 @@
final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder);
final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec);
mTextureView = findViewById(R.id.texture);
+ mTextView = findViewById(R.id.textViewResults);
+
checkBox_mr.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
@@ -162,6 +176,7 @@
@Override
public void onClick(View v) {
if (v.getId() == R.id.start_button) {
+ mTextView.setText(null);
if (mIsMediaRecorder) {
if (mIsRecording) {
stopRecordingVideo();
@@ -198,6 +213,7 @@
mainActivity.mOutputVideoPath);
try {
encodingStatus = codecSurfaceEncoder.startEncodingSurface();
+ mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes();
} catch (IOException | InterruptedException e) {
e.printStackTrace();
}
@@ -211,6 +227,13 @@
if (encodingStatus == 0) {
Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed",
Toast.LENGTH_SHORT).show();
+ mainActivity.mTextView.append("\n Encoded stream contains: ");
+ mainActivity.mTextView.append("\n Number of I-Frames: " +
+ mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]);
+ mainActivity.mTextView.append("\n Number of P-Frames: " +
+ mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]);
+ mainActivity.mTextView.append("\n Number of B-Frames: " +
+ mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]);
} else {
Toast.makeText(mainActivity.getApplicationContext(),
"Error occurred while " + "encoding", Toast.LENGTH_SHORT).show();
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
index 146a475..011c38c 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java
@@ -31,10 +31,14 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
+import java.util.Arrays;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
public class MediaCodecSurfaceEncoder {
private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
-
private static final boolean DEBUG = false;
private static final int VIDEO_BITRATE = 8000000 /*8 Mbps*/;
private static final int VIDEO_FRAMERATE = 30;
@@ -44,6 +48,8 @@
private final String mMime;
private final String mOutputPath;
private int mTrackID = -1;
+ private int mFrameNum = 0;
+ private int[] mFrameTypeOccurrences = {0, 0, 0};
private Surface mSurface;
private MediaExtractor mExtractor;
@@ -128,8 +134,10 @@
mEncoder.reset();
mSurface.release();
mSurface = null;
+ Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]);
+ Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]);
+ Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]);
}
-
mEncoder.release();
mDecoder.release();
mExtractor.release();
@@ -193,6 +201,8 @@
mSawEncOutputEOS = false;
mDecOutputCount = 0;
mEncOutputCount = 0;
+ mFrameNum = 0;
+ Arrays.fill(mFrameTypeOccurrences, 0);
}
private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) {
@@ -336,6 +346,21 @@
}
if (info.size > 0) {
ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex);
+ // Parse the buffer to get the frame type
+ if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]");
+ int frameTypeResult = -1;
+ if (mMime == MediaFormat.MIMETYPE_VIDEO_AVC) {
+ frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf);
+ } else if (mMime == MediaFormat.MIMETYPE_VIDEO_HEVC){
+ frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf);
+ } else {
+ Log.e(TAG, "Mime type " + mMime + " is not supported.");
+ return;
+ }
+ if (frameTypeResult != -1) {
+ mFrameTypeOccurrences[frameTypeResult]++;
+ }
+
if (mMuxer != null) {
if (mTrackID == -1) {
mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat());
@@ -353,4 +378,8 @@
private boolean hasSeenError() {
return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError();
}
+
+ public int[] getFrameTypes() {
+ return mFrameTypeOccurrences;
+ }
}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
new file mode 100644
index 0000000..efff4fd
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
+import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;
+
+public class NalUnitUtil {
+ private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName();
+ private static final boolean DEBUG = false;
+
+ public static int findNalUnit(byte[] dataArray, int pos, int limit) {
+ int startOffset = 0;
+ if (limit - pos < 4) {
+ return startOffset;
+ }
+ if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) {
+ startOffset = 3;
+ } else {
+ if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0 &&
+ dataArray[pos + 3] == 1) {
+ startOffset = 4;
+ }
+ }
+ return startOffset;
+ }
+
+ private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+ return dataArray[nalUnitOffset] & 0x1F;
+ }
+
+ private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) {
+ ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+ bitArray.reset(dataArray, offset, limit);
+
+ bitArray.skipBit(); // forbidden_zero_bit
+ bitArray.readBits(2); // nal_ref_idc
+ bitArray.skipBits(5); // nal_unit_type
+
+ bitArray.readUEV(); // first_mb_in_slice
+ if (!bitArray.canReadUEV()) {
+ return -1;
+ }
+ int sliceType = bitArray.readUEV();
+ if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+ if (sliceType == 0) {
+ return FRAME_TYPE_P;
+ } else if (sliceType == 1) {
+ return FRAME_TYPE_B;
+ } else if (sliceType == 2) {
+ return FRAME_TYPE_I;
+ } else {
+ return -1;
+ }
+ }
+
+ private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
+ return (dataArray[nalUnitOffset] & 0x7E) >> 1;
+ }
+
+ private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit,
+ int nalUnitType) {
+ // nal_unit_type values from H.265/HEVC Table 7-1.
+ final int BLA_W_LP = 16;
+ final int RSV_IRAP_VCL23 = 23;
+
+ ParsableBitArray bitArray = new ParsableBitArray(dataArray);
+ bitArray.reset(dataArray, offset, limit);
+
+ bitArray.skipBit(); // forbidden zero bit
+ bitArray.readBits(6); // nal_unit_header
+ bitArray.readBits(6); // nuh_layer_id
+ bitArray.readBits(3); // nuh_temporal_id_plus1
+
+ // Parsing slice_segment_header values from H.265/HEVC specification clause 7.3.6.1
+ boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag
+ if (!first_slice_segment) return -1;
+ if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) {
+ bitArray.readBit(); // no_output_of_prior_pics_flag
+ }
+ bitArray.readUEV(); // slice_pic_parameter_set_id
+ // Assumes the num_extra_slice_header_bits element of the PPS data is 0
+ int sliceType = bitArray.readUEV();
+ if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
+ if (sliceType == 0) {
+ return FRAME_TYPE_B;
+ } else if (sliceType == 1) {
+ return FRAME_TYPE_P;
+ } else if (sliceType == 2) {
+ return FRAME_TYPE_I;
+ } else {
+ return -1;
+ }
+ }
+
+ public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) {
+ int limit = buf.limit();
+ byte[] dataArray = new byte[buf.remaining()];
+ buf.get(dataArray);
+ int frameType = -1;
+ for (int pos = 0; pos + 3 < limit; ) {
+ int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+ if (startOffset != 0) {
+ int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset));
+ if (DEBUG) {
+ Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+ Log.d(TAG, "NalUnitType = " + nalUnitType);
+ }
+ // SLICE_NAL = 1; IDR_SLICE_NAL = 5
+ if (nalUnitType == 1 || nalUnitType == 5) {
+ frameType = parseAVCNALUnitData(dataArray, (pos + startOffset),
+ (limit - pos - startOffset));
+ break;
+ }
+ pos += 3;
+ } else {
+ pos++;
+ }
+ }
+ return frameType;
+ }
+
+ public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) {
+ int limit = buf.limit();
+ byte[] dataArray = new byte[buf.remaining()];
+ buf.get(dataArray);
+ int frameType = -1;
+ for (int pos = 0; pos + 3 < limit; ) {
+ int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
+ if (startOffset != 0) {
+ int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset));
+ if (DEBUG) {
+ Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
+ Log.d(TAG, "NalUnitType = " + nalUnitType);
+ }
+ // Parse NAL units containing slice_headers, whose types lie in the range 0 to 21
+ if (nalUnitType >= 0 && nalUnitType <= 21) {
+ frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset),
+ (limit - pos - startOffset), nalUnitType);
+ break;
+ }
+ pos += 3;
+ } else {
+ pos++;
+ }
+ }
+ return frameType;
+ }
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
new file mode 100644
index 0000000..e4bfaa3
--- /dev/null
+++ b/media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.samplevideoencoder;
+
+public class ParsableBitArray {
+ public byte[] data;
+ private int byteOffset;
+ private int bitOffset;
+ private int byteLimit;
+
+ public ParsableBitArray(byte[] dataArray) {
+ this(dataArray, dataArray.length);
+ }
+
+ public ParsableBitArray(byte[] dataArray, int limit) {
+ this.data = dataArray;
+ byteLimit = limit;
+ }
+
+ public void reset(byte[] data, int offset, int limit) {
+ this.data = data;
+ byteOffset = offset;
+ bitOffset = 0;
+ byteLimit = limit;
+ }
+
+ public void skipBit() {
+ if (++bitOffset == 8) {
+ bitOffset = 0;
+ byteOffset++;
+ }
+ }
+
+ public void skipBits(int numBits) {
+ int numBytes = numBits / 8;
+ byteOffset += numBytes;
+ bitOffset += numBits - (numBytes * 8);
+ if (bitOffset > 7) {
+ byteOffset++;
+ bitOffset -= 8;
+ }
+ }
+
+ public boolean readBit() {
+ boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0;
+ skipBit();
+ return returnValue;
+ }
+
+ public int readBits(int numBits) {
+ if (numBits == 0) {
+ return 0;
+ }
+ int returnValue = 0;
+ bitOffset += numBits;
+ while (bitOffset > 8) {
+ bitOffset -= 8;
+ returnValue |= (data[byteOffset++] & 0xFF) << bitOffset;
+ }
+ returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset);
+ returnValue &= 0xFFFFFFFF >>> (32 - numBits);
+ if (bitOffset == 8) {
+ bitOffset = 0;
+ byteOffset++;
+ }
+ return returnValue;
+ }
+
+ public boolean canReadUEV() {
+ int initialByteOffset = byteOffset;
+ int initialBitOffset = bitOffset;
+ int leadingZeros = 0;
+ while (byteOffset < byteLimit && !readBit()) {
+ leadingZeros++;
+ }
+ boolean hitLimit = byteOffset == byteLimit;
+ byteOffset = initialByteOffset;
+ bitOffset = initialBitOffset;
+ return !hitLimit && canReadBits(leadingZeros * 2 + 1);
+ }
+
+ public int readUEV() {
+ int leadingZeros = 0;
+ while (!readBit()) {
+ leadingZeros++;
+ }
+ return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
+ }
+
+ public boolean canReadBits(int numBits) {
+ int oldByteOffset = byteOffset;
+ int numBytes = numBits / 8;
+ int newByteOffset = byteOffset + numBytes;
+ int newBitOffset = bitOffset + numBits - (numBytes * 8);
+ if (newBitOffset > 7) {
+ newByteOffset++;
+ newBitOffset -= 8;
+ }
+ for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) {
+ if (shouldSkipByte(i)) {
+ // Skip the byte and check three bytes ahead.
+ newByteOffset++;
+ i += 2;
+ }
+ }
+ return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0);
+ }
+
+ private boolean shouldSkipByte(int offset) {
+ return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 &&
+ data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00);
+ }
+
+}
diff --git a/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
index 164e02a..017012d 100644
--- a/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
+++ b/media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml
@@ -124,4 +124,15 @@
</FrameLayout>
+ <TextView
+ android:id="@+id/textViewResults"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginTop="10dp"
+ android:fontFamily="sans-serif-medium"
+ android:textSize="18sp"
+ android:textStyle="normal"
+ app:layout_constraintStart_toStartOf="parent"
+ app:layout_constraintTop_toBottomOf = "@+id/frameLayout2" />
+
</androidx.constraintlayout.widget.ConstraintLayout>
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 748afeb..12179db 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -37,6 +37,8 @@
],
static_libs: [
"libc_malloc_debug_backtrace",
+ "libbatterystats_aidl",
+ "libprocessinfoservice_aidl",
],
shared_libs: [
"libaudioutils", // for clock.h
@@ -48,6 +50,9 @@
"android.hardware.graphics.bufferqueue@1.0",
"android.hidl.token@1.0-utils",
],
+ export_static_lib_headers: [
+ "libbatterystats_aidl",
+ ],
logtags: ["EventLogTags.logtags"],
diff --git a/media/utils/ProcessInfo.cpp b/media/utils/ProcessInfo.cpp
index 113e4a7..e9c9f8d 100644
--- a/media/utils/ProcessInfo.cpp
+++ b/media/utils/ProcessInfo.cpp
@@ -21,9 +21,9 @@
#include <media/stagefright/ProcessInfo.h>
#include <binder/IPCThreadState.h>
-#include <binder/IProcessInfoService.h>
#include <binder/IServiceManager.h>
#include <private/android_filesystem_config.h>
+#include <processinfo/IProcessInfoService.h>
namespace android {
diff --git a/media/utils/fuzzers/Android.bp b/media/utils/fuzzers/Android.bp
index 80882b2..5c03926 100644
--- a/media/utils/fuzzers/Android.bp
+++ b/media/utils/fuzzers/Android.bp
@@ -10,6 +10,7 @@
cc_defaults {
name: "libmediautils_fuzzer_defaults",
shared_libs: [
+ "libbatterystats_aidl",
"libbinder",
"libcutils",
"liblog",
diff --git a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
index 4521853..130feee 100644
--- a/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
+++ b/media/utils/fuzzers/SchedulingPolicyServiceFuzz.cpp
@@ -14,7 +14,7 @@
* limitations under the License.
*/
#define LOG_TAG "BatteryNotifierFuzzer"
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
#include <binder/IServiceManager.h>
#include <utils/String16.h>
#include <android/log.h>
diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h
index a4e42ad..3812d7a 100644
--- a/media/utils/include/mediautils/BatteryNotifier.h
+++ b/media/utils/include/mediautils/BatteryNotifier.h
@@ -17,7 +17,7 @@
#ifndef MEDIA_BATTERY_NOTIFIER_H
#define MEDIA_BATTERY_NOTIFIER_H
-#include <binder/IBatteryStats.h>
+#include <batterystats/IBatteryStats.h>
#include <utils/Singleton.h>
#include <utils/String8.h>
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
index 7e06096..d8565bd 100644
--- a/services/audioflinger/AudioStreamOut.cpp
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -173,22 +173,15 @@
return status;
}
-audio_format_t AudioStreamOut::getFormat() const
+audio_config_base_t AudioStreamOut::getAudioProperties() const
{
- audio_format_t result;
- return stream->getFormat(&result) == OK ? result : AUDIO_FORMAT_INVALID;
-}
-
-uint32_t AudioStreamOut::getSampleRate() const
-{
- uint32_t result;
- return stream->getSampleRate(&result) == OK ? result : 0;
-}
-
-audio_channel_mask_t AudioStreamOut::getChannelMask() const
-{
- audio_channel_mask_t result;
- return stream->getChannelMask(&result) == OK ? result : AUDIO_CHANNEL_INVALID;
+ audio_config_base_t result = AUDIO_CONFIG_BASE_INITIALIZER;
+ if (stream->getAudioProperties(&result) != OK) {
+ result.sample_rate = 0;
+ result.channel_mask = AUDIO_CHANNEL_INVALID;
+ result.format = AUDIO_FORMAT_INVALID;
+ }
+ return result;
}
int AudioStreamOut::flush()
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
index 16fbcf2..565f43a 100644
--- a/services/audioflinger/AudioStreamOut.h
+++ b/services/audioflinger/AudioStreamOut.h
@@ -81,22 +81,14 @@
virtual size_t getFrameSize() const { return mHalFrameSize; }
/**
- * @return format from the perspective of the application and the AudioFlinger.
+ * @return audio stream configuration: channel mask, format, sample rate:
+ * - channel mask from the perspective of the application and the AudioFlinger,
+ * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI;
+ * - format from the perspective of the application and the AudioFlinger;
+ * - sample rate from the perspective of the application and the AudioFlinger,
+ * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
*/
- virtual audio_format_t getFormat() const;
-
- /**
- * The HAL may be running at a higher sample rate if, for example, playing wrapped EAC3.
- * @return sample rate from the perspective of the application and the AudioFlinger.
- */
- virtual uint32_t getSampleRate() const;
-
- /**
- * The HAL is in stereo mode when playing multi-channel compressed audio over HDMI.
- * @return channel mask from the perspective of the application and the AudioFlinger.
- */
- virtual audio_channel_mask_t getChannelMask() const;
-
+ virtual audio_config_base_t getAudioProperties() const;
virtual status_t flush();
virtual status_t standby();
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index cd3c743..13e2ced 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -353,7 +353,8 @@
#endif
//ALOGD("Eric FastMixer::onWork() mIsWarm");
} else {
- dumpState->mTimestampVerifier.discontinuity();
+ dumpState->mTimestampVerifier.discontinuity(
+ dumpState->mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
// See comment in if block.
#ifdef FASTMIXER_LOG_HIST_TS
LOG_AUDIO_STATE();
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 7804822..9770054 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -217,6 +217,10 @@
void flushAck();
bool isResumePending();
void resumeAck();
+ // For direct or offloaded tracks ensure that the pause state is acknowledged
+ // by the playback thread in case of an immediate flush.
+ bool isPausePending() const { return mPauseHwPending; }
+ void pauseAck();
void updateTrackFrameInfo(int64_t trackFramesReleased, int64_t sinkFramesWritten,
uint32_t halSampleRate, const ExtendedTimestamp &timeStamp);
@@ -284,8 +288,6 @@
};
sp<AudioVibrationController> mAudioVibrationController;
sp<os::ExternalVibration> mExternalVibration;
- /** How many frames should be in the buffer before the track is considered ready */
- const size_t mFrameCountToBeReady;
audio_dual_mono_mode_t mDualMonoMode = AUDIO_DUAL_MONO_MODE_OFF;
float mAudioDescriptionMixLevel = -std::numeric_limits<float>::infinity();
@@ -314,6 +316,7 @@
sp<AudioTrackServerProxy> mAudioTrackServerProxy;
bool mResumeToStopping; // track was paused in stopping state.
bool mFlushHwPending; // track requests for thread flush
+ bool mPauseHwPending = false; // direct/offload track request for thread pause
audio_output_flags_t mFlags;
// If the last track change was notified to the client with readAndClearHasChanged
std::atomic_flag mChangeNotified = ATOMIC_FLAG_INIT;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 927d87e..deb13af 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -622,7 +622,7 @@
mIoJitterMs.reset();
mLatencyMs.reset();
mProcessTimeMs.reset();
- mTimestampVerifier.discontinuity();
+ mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
sp<ConfigEvent> configEvent = (ConfigEvent *)new IoConfigEvent(event, pid, portId);
sendConfigEvent_l(configEvent);
@@ -2719,7 +2719,7 @@
// the timestamp frame position to reset to 0 for direct and offload threads.
// (Out of sequence requests are ignored, since the discontinuity would be handled
// elsewhere, e.g. in flush).
- mTimestampVerifier.discontinuity();
+ mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
mDrainSequence &= ~1;
mWaitWorkCV.signal();
}
@@ -2728,8 +2728,9 @@
void AudioFlinger::PlaybackThread::readOutputParameters_l()
{
// unfortunately we have no way of recovering from errors here, hence the LOG_ALWAYS_FATAL
- mSampleRate = mOutput->getSampleRate();
- mChannelMask = mOutput->getChannelMask();
+ const audio_config_base_t audioConfig = mOutput->getAudioProperties();
+ mSampleRate = audioConfig.sample_rate;
+ mChannelMask = audioConfig.channel_mask;
if (!audio_is_output_channel(mChannelMask)) {
LOG_ALWAYS_FATAL("HAL channel mask %#x not valid for output", mChannelMask);
}
@@ -2742,11 +2743,11 @@
mBalance.setChannelMask(mChannelMask);
// Get actual HAL format.
- status_t result = mOutput->stream->getFormat(&mHALFormat);
+ status_t result = mOutput->stream->getAudioProperties(nullptr, nullptr, &mHALFormat);
LOG_ALWAYS_FATAL_IF(result != OK, "Error when retrieving output stream format: %d", result);
// Get format from the shim, which will be different than the HAL format
// if playing compressed audio over HDMI passthrough.
- mFormat = mOutput->getFormat();
+ mFormat = audioConfig.format;
if (!audio_is_valid_format(mFormat)) {
LOG_ALWAYS_FATAL("HAL format %#x not valid for output", mFormat);
}
@@ -3408,7 +3409,6 @@
mStandbyTimeNs = systemTime();
int64_t lastLoopCountWritten = -2; // never matches "previous" loop, when loopCount = 0.
- int64_t lastFramesWritten = -1; // track changes in timestamp server frames written
// MIXER
nsecs_t lastWarning = 0;
@@ -3444,14 +3444,6 @@
checkSilentMode_l();
- // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
- // TODO: add confirmation checks:
- // 1) DIRECT threads and linear PCM format really resets to 0?
- // 2) Is frame count really valid if not linear pcm?
- // 3) Are all 64 bits of position returned, not just lowest 32 bits?
- if (mType == OFFLOAD || mType == DIRECT) {
- mTimestampVerifier.setDiscontinuityMode(mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
- }
audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
// loopCount is used for statistics and diagnostics.
@@ -3523,135 +3515,8 @@
logString = NULL;
}
- // Collect timestamp statistics for the Playback Thread types that support it.
- if (mType == MIXER
- || mType == DUPLICATING
- || mType == DIRECT
- || mType == OFFLOAD) { // no indentation
- // Gather the framesReleased counters for all active tracks,
- // and associate with the sink frames written out. We need
- // this to convert the sink timestamp to the track timestamp.
- bool kernelLocationUpdate = false;
- ExtendedTimestamp timestamp; // use private copy to fetch
- if (mStandby) {
- mTimestampVerifier.discontinuity();
- } else if (threadloop_getHalTimestamp_l(×tamp) == OK) {
- mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
- timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
- mSampleRate);
+ collectTimestamps_l();
- if (isTimestampCorrectionEnabled()) {
- ALOGVV("TS_BEFORE: %d %lld %lld", id(),
- (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
- (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
- auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
- timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
- = correctedTimestamp.mFrames;
- timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
- = correctedTimestamp.mTimeNs;
- ALOGVV("TS_AFTER: %d %lld %lld", id(),
- (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
- (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
-
- // Note: Downstream latency only added if timestamp correction enabled.
- if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
- const int64_t newPosition =
- timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
- - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
- // prevent retrograde
- timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
- newPosition,
- (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
- - mSuspendedFrames));
- }
- }
-
- // We always fetch the timestamp here because often the downstream
- // sink will block while writing.
-
- // We keep track of the last valid kernel position in case we are in underrun
- // and the normal mixer period is the same as the fast mixer period, or there
- // is some error from the HAL.
- if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
-
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
- }
-
- if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
- kernelLocationUpdate = true;
- } else {
- ALOGVV("getTimestamp error - no valid kernel position");
- }
-
- // copy over kernel info
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
- timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
- + mSuspendedFrames; // add frames discarded when suspended
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
- timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
- } else {
- mTimestampVerifier.error();
- }
-
- // mFramesWritten for non-offloaded tracks are contiguous
- // even after standby() is called. This is useful for the track frame
- // to sink frame mapping.
- bool serverLocationUpdate = false;
- if (mFramesWritten != lastFramesWritten) {
- serverLocationUpdate = true;
- lastFramesWritten = mFramesWritten;
- }
- // Only update timestamps if there is a meaningful change.
- // Either the kernel timestamp must be valid or we have written something.
- if (kernelLocationUpdate || serverLocationUpdate) {
- if (serverLocationUpdate) {
- // use the time before we called the HAL write - it is a bit more accurate
- // to when the server last read data than the current time here.
- //
- // If we haven't written anything, mLastIoBeginNs will be -1
- // and we use systemTime().
- mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
- mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
- ? systemTime() : mLastIoBeginNs;
- }
-
- for (const sp<Track> &t : mActiveTracks) {
- if (!t->isFastTrack()) {
- t->updateTrackFrameInfo(
- t->mAudioTrackServerProxy->framesReleased(),
- mFramesWritten,
- mSampleRate,
- mTimestamp);
- }
- }
- }
-
- if (audio_has_proportional_frames(mFormat)) {
- const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
- if (latencyMs != 0.) { // note 0. means timestamp is empty.
- mLatencyMs.add(latencyMs);
- }
- }
-
- } // if (mType ... ) { // no indentation
-#if 0
- // logFormat example
- if (z % 100 == 0) {
- timespec ts;
- clock_gettime(CLOCK_MONOTONIC, &ts);
- LOGT("This is an integer %d, this is a float %f, this is my "
- "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
- LOGT("A deceptive null-terminated string %\0");
- }
- ++z;
-#endif
saveOutputTracks();
if (mSignalPending) {
// A signal was raised while we were unlocked
@@ -4091,6 +3956,148 @@
return false;
}
+void AudioFlinger::PlaybackThread::collectTimestamps_l()
+{
+ // Collect timestamp statistics for the Playback Thread types that support it.
+ if (mType != MIXER
+ && mType != DUPLICATING
+ && mType != DIRECT
+ && mType != OFFLOAD) {
+ return;
+ }
+ if (mStandby) {
+ mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
+ return;
+ } else if (mHwPaused) {
+ mTimestampVerifier.discontinuity(mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS);
+ return;
+ }
+
+ // Gather the framesReleased counters for all active tracks,
+ // and associate with the sink frames written out. We need
+ // this to convert the sink timestamp to the track timestamp.
+ bool kernelLocationUpdate = false;
+ ExtendedTimestamp timestamp; // use private copy to fetch
+
+ // Always query HAL timestamp and update timestamp verifier. In standby or pause,
+ // HAL may be draining some small duration buffered data for fade out.
+ if (threadloop_getHalTimestamp_l(&timestamp) == OK) {
+ mTimestampVerifier.add(timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL],
+ timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+ mSampleRate);
+
+ if (isTimestampCorrectionEnabled()) {
+ ALOGVV("TS_BEFORE: %d %lld %lld", id(),
+ (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+ (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+ auto correctedTimestamp = mTimestampVerifier.getLastCorrectedTimestamp();
+ timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+ = correctedTimestamp.mFrames;
+ timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]
+ = correctedTimestamp.mTimeNs;
+ ALOGVV("TS_AFTER: %d %lld %lld", id(),
+ (long long)timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL],
+ (long long)timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]);
+
+ // Note: Downstream latency only added if timestamp correction enabled.
+ if (mDownstreamLatencyStatMs.getN() > 0) { // we have latency info.
+ const int64_t newPosition =
+ timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+ - int64_t(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
+ // prevent retrograde
+ timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] = max(
+ newPosition,
+ (mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+ - mSuspendedFrames));
+ }
+ }
+
+ // We always fetch the timestamp here because often the downstream
+ // sink will block while writing.
+
+ // We keep track of the last valid kernel position in case we are in underrun
+ // and the normal mixer period is the same as the fast mixer period, or there
+ // is some error from the HAL.
+ if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL_LASTKERNELOK] =
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER];
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER_LASTKERNELOK] =
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER];
+ }
+
+ if (timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] >= 0) {
+ kernelLocationUpdate = true;
+ } else {
+ ALOGVV("getTimestamp error - no valid kernel position");
+ }
+
+ // copy over kernel info
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL] =
+ timestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL]
+ + mSuspendedFrames; // add frames discarded when suspended
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] =
+ timestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+ } else {
+ mTimestampVerifier.error();
+ }
+
+ // mFramesWritten for non-offloaded tracks are contiguous
+ // even after standby() is called. This is useful for the track frame
+ // to sink frame mapping.
+ bool serverLocationUpdate = false;
+ if (mFramesWritten != mLastFramesWritten) {
+ serverLocationUpdate = true;
+ mLastFramesWritten = mFramesWritten;
+ }
+ // Only update timestamps if there is a meaningful change.
+ // Either the kernel timestamp must be valid or we have written something.
+ if (kernelLocationUpdate || serverLocationUpdate) {
+ if (serverLocationUpdate) {
+ // use the time before we called the HAL write - it is a bit more accurate
+ // to when the server last read data than the current time here.
+ //
+ // If we haven't written anything, mLastIoBeginNs will be -1
+ // and we use systemTime().
+ mTimestamp.mPosition[ExtendedTimestamp::LOCATION_SERVER] = mFramesWritten;
+ mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_SERVER] = mLastIoBeginNs == -1
+ ? systemTime() : mLastIoBeginNs;
+ }
+
+ for (const sp<Track> &t : mActiveTracks) {
+ if (!t->isFastTrack()) {
+ t->updateTrackFrameInfo(
+ t->mAudioTrackServerProxy->framesReleased(),
+ mFramesWritten,
+ mSampleRate,
+ mTimestamp);
+ }
+ }
+ }
+
+ if (audio_has_proportional_frames(mFormat)) {
+ const double latencyMs = mTimestamp.getOutputServerLatencyMs(mSampleRate);
+ if (latencyMs != 0.) { // note 0. means timestamp is empty.
+ mLatencyMs.add(latencyMs);
+ }
+ }
+#if 0
+ // logFormat example
+ if (z % 100 == 0) {
+ timespec ts;
+ clock_gettime(CLOCK_MONOTONIC, &ts);
+ LOGT("This is an integer %d, this is a float %f, this is my "
+ "pid %p %% %s %t", 42, 3.14, "and this is a timestamp", ts);
+ LOGT("A deceptive null-terminated string %\0");
+ }
+ ++z;
+#endif
+}
+
// removeTracks_l() must be called with ThreadBase::mLock held
void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
{
@@ -4137,20 +4144,15 @@
return status;
}
if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL) {
- uint64_t position64;
- if (mOutput->getPresentationPosition(&position64, &timestamp.mTime) == OK) {
- timestamp.mPosition = (uint32_t)position64;
- if (mDownstreamLatencyStatMs.getN() > 0) {
- const uint32_t positionOffset =
- (uint32_t)(mDownstreamLatencyStatMs.getMean() * mSampleRate * 1e-3);
- if (positionOffset > timestamp.mPosition) {
- timestamp.mPosition = 0;
- } else {
- timestamp.mPosition -= positionOffset;
- }
- }
- return NO_ERROR;
+ collectTimestamps_l();
+ if (mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL] <= 0) {
+ return INVALID_OPERATION;
}
+ timestamp.mPosition = mTimestamp.mPosition[ExtendedTimestamp::LOCATION_KERNEL];
+ const int64_t timeNs = mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL];
+ timestamp.mTime.tv_sec = timeNs / NANOS_PER_SECOND;
+ timestamp.mTime.tv_nsec = timeNs - (timestamp.mTime.tv_sec * NANOS_PER_SECOND);
+ return NO_ERROR;
}
return INVALID_OPERATION;
}
@@ -5551,8 +5553,6 @@
status_t& status)
{
bool reconfig = false;
- bool a2dpDeviceChanged = false;
-
status = NO_ERROR;
AutoPark<FastMixer> park(mFastMixer);
@@ -5624,7 +5624,7 @@
}
}
- return reconfig || a2dpDeviceChanged;
+ return reconfig;
}
@@ -5825,8 +5825,15 @@
sp<Track> l = mActiveTracks.getLatest();
bool last = l.get() == track;
- if (track->isPausing()) {
- track->setPaused();
+ if (track->isPausePending()) {
+ track->pauseAck();
+ // It is possible a track might have been flushed or stopped.
+ // Other operations such as flush pending might occur on the next prepare.
+ if (track->isPausing()) {
+ track->setPaused();
+ }
+ // Always perform pause, as an immediate flush will change
+ // the pause state to be no longer isPausing().
if (mHwSupportsPause && last && !mHwPaused) {
doHwPause = true;
mHwPaused = true;
@@ -6085,8 +6092,6 @@
status_t& status)
{
bool reconfig = false;
- bool a2dpDeviceChanged = false;
-
status = NO_ERROR;
AudioParameter param = AudioParameter(keyValuePair);
@@ -6121,7 +6126,7 @@
}
}
- return reconfig || a2dpDeviceChanged;
+ return reconfig;
}
uint32_t AudioFlinger::DirectOutputThread::activeSleepTimeUs() const
@@ -6178,7 +6183,7 @@
mOutput->flush();
mHwPaused = false;
mFlushPending = false;
- mTimestampVerifier.discontinuity(); // DIRECT and OFFLOADED flush resets frame count.
+ mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
mTimestamp.clear();
}
@@ -6370,8 +6375,15 @@
continue;
}
- if (track->isPausing()) {
- track->setPaused();
+ if (track->isPausePending()) {
+ track->pauseAck();
+ // It is possible a track might have been flushed or stopped.
+ // Other operations such as flush pending might occur on the next prepare.
+ if (track->isPausing()) {
+ track->setPaused();
+ }
+ // Always perform pause if last, as an immediate flush will change
+ // the pause state to be no longer isPausing().
if (last) {
if (mHwSupportsPause && !mHwPaused) {
doHwPause = true;
@@ -6514,13 +6526,14 @@
track->presentationComplete(framesWritten, audioHALFrames);
track->reset();
tracksToRemove->add(track);
- // DIRECT and OFFLOADED stop resets frame counts.
+ // OFFLOADED stop resets frame counts.
if (!mUseAsyncWrite) {
// If we don't get explicit drain notification we must
// register discontinuity regardless of whether this is
// the previous (!last) or the upcoming (last) track
// to avoid skipping the discontinuity.
- mTimestampVerifier.discontinuity();
+ mTimestampVerifier.discontinuity(
+ mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
}
}
} else {
@@ -7378,7 +7391,9 @@
if (mPipeSource.get() == nullptr /* don't obtain for FastCapture, could block */) {
int64_t position, time;
if (mStandby) {
- mTimestampVerifier.discontinuity();
+ mTimestampVerifier.discontinuity(audio_is_linear_pcm(mFormat) ?
+ mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS :
+ mTimestampVerifier.DISCONTINUITY_MODE_ZERO);
} else if (mSource->getCapturePosition(&position, &time) == NO_ERROR
&& time > mTimestamp.mTimeNs[ExtendedTimestamp::LOCATION_KERNEL]) {
@@ -8370,13 +8385,11 @@
}
if (reconfig) {
if (status == BAD_VALUE) {
- uint32_t sRate;
- audio_channel_mask_t channelMask;
- audio_format_t format;
- if (mInput->stream->getAudioProperties(&sRate, &channelMask, &format) == OK &&
- audio_is_linear_pcm(format) && audio_is_linear_pcm(reqFormat) &&
- sRate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
- audio_channel_count_from_in_mask(channelMask) <= FCC_8) {
+ audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
+ if (mInput->stream->getAudioProperties(&config) == OK &&
+ audio_is_linear_pcm(config.format) && audio_is_linear_pcm(reqFormat) &&
+ config.sample_rate <= (AUDIO_RESAMPLER_DOWN_RATIO_MAX * samplingRate) &&
+ audio_channel_count_from_in_mask(config.channel_mask) <= FCC_8) {
status = NO_ERROR;
}
}
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 709a3cc..9f65562 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -590,6 +590,11 @@
ExtendedTimestamp mTimestamp;
TimestampVerifier< // For timestamp statistics.
int64_t /* frame count */, int64_t /* time ns */> mTimestampVerifier;
+ // DIRECT and OFFLOAD threads should reset frame count to zero on stop/flush
+ // TODO: add confirmation checks:
+ // 1) DIRECT threads and linear PCM format really resets to 0?
+ // 2) Is frame count really valid if not linear pcm?
+ // 3) Are all 64 bits of position returned, not just lowest 32 bits?
// Timestamp corrected device should be a single device.
audio_devices_t mTimestampCorrectedDevice = AUDIO_DEVICE_NONE;
@@ -1023,6 +1028,8 @@
int64_t mBytesWritten;
int64_t mFramesWritten; // not reset on standby
+ int64_t mLastFramesWritten = -1; // track changes in timestamp
+ // server frames written.
int64_t mSuspendedFrames; // not reset on standby
// mHapticChannelMask and mHapticChannelCount will only be valid when the thread support
@@ -1035,6 +1042,14 @@
// copy rather than the one in AudioFlinger. This optimization saves a lock.
bool mMasterMute;
void setMasterMute_l(bool muted) { mMasterMute = muted; }
+
+ auto discontinuityForStandbyOrFlush() const { // call on threadLoop or with lock.
+ return ((mType == DIRECT && !audio_is_linear_pcm(mFormat))
+ || mType == OFFLOAD)
+ ? mTimestampVerifier.DISCONTINUITY_MODE_ZERO
+ : mTimestampVerifier.DISCONTINUITY_MODE_CONTINUOUS;
+ }
+
protected:
ActiveTracks<Track> mActiveTracks;
@@ -1081,6 +1096,8 @@
void updateMetadata_l() final;
virtual void sendMetadataToBackend_l(const StreamOutHalInterface::SourceMetadata& metadata);
+ void collectTimestamps_l();
+
// The Tracks class manages tracks added and removed from the Thread.
template <typename T>
class Tracks {
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index ee886d5..4353b3d 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -638,7 +638,6 @@
mOpPlayAudioMonitor(OpPlayAudioMonitor::createIfNeeded(
uid, attr, id(), streamType, opPackageName)),
// mSinkTimestamp
- mFrameCountToBeReady(frameCountToBeReady),
mFastIndex(-1),
mCachedVolume(1.0),
/* The track might not play immediately after being active, similarly as if its volume was 0.
@@ -672,6 +671,7 @@
mFrameSize, sampleRate);
}
mServerProxy = mAudioTrackServerProxy;
+ mServerProxy->setStartThresholdInFrames(frameCountToBeReady); // update the Cblk value
// only allocate a fast track index if we were able to allocate a normal track name
if (flags & AUDIO_OUTPUT_FLAG_FAST) {
@@ -999,7 +999,10 @@
}
size_t bufferSizeInFrames = mServerProxy->getBufferSizeInFrames();
- size_t framesToBeReady = std::min(mFrameCountToBeReady, bufferSizeInFrames);
+ // Note: mServerProxy->getStartThresholdInFrames() is clamped.
+ const size_t startThresholdInFrames = mServerProxy->getStartThresholdInFrames();
+ const size_t framesToBeReady = std::clamp( // clamp again to validate client values.
+ std::min(startThresholdInFrames, bufferSizeInFrames), size_t(1), mFrameCount);
if (framesReady() >= framesToBeReady || (mCblk->mFlags & CBLK_FORCEREADY)) {
ALOGV("%s(%d): consider track ready with %zu/%zu, target was %zu)",
@@ -1038,6 +1041,11 @@
// initial state-stopping. next state-pausing.
// What if resume is called ?
+ if (state == FLUSHED) {
+ // avoid underrun glitches when starting after flush
+ reset();
+ }
+
if (state == PAUSED || state == PAUSING) {
if (mResumeToStopping) {
// happened we need to resume to STOPPING_1
@@ -1169,6 +1177,9 @@
mState = PAUSING;
ALOGV("%s(%d): ACTIVE/RESUMING => PAUSING on thread %d",
__func__, mId, (int)mThreadIoHandle);
+ if (isOffloadedOrDirect()) {
+ mPauseHwPending = true;
+ }
playbackThread->broadcast_l();
break;
@@ -1256,6 +1267,11 @@
mFlushHwPending = false;
}
+void AudioFlinger::PlaybackThread::Track::pauseAck()
+{
+ mPauseHwPending = false;
+}
+
void AudioFlinger::PlaybackThread::Track::reset()
{
// Do not reset twice to avoid discarding data written just after a flush and before
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index ca29591..2038aa9 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -218,7 +218,9 @@
add(devices);
return size();
}
- return SortedVector::merge(devices);
+ ssize_t ret = SortedVector::merge(devices);
+ refreshTypes();
+ return ret;
}
/**
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index 8c7fb97..20c2c28 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -17,6 +17,8 @@
#define LOG_TAG "APM::AudioPolicyEngine/Base"
//#define LOG_NDEBUG 0
+#include <sys/stat.h>
+
#include "EngineBase.h"
#include "EngineDefaultConfig.h"
#include <TypeConverter.h>
@@ -147,8 +149,13 @@
});
return iter != end(volumeGroups);
};
+ auto fileExists = [](const char* path) {
+ struct stat fileStat;
+ return stat(path, &fileStat) == 0 && S_ISREG(fileStat.st_mode);
+ };
- auto result = engineConfig::parse();
+ auto result = fileExists(engineConfig::DEFAULT_PATH) ?
+ engineConfig::parse(engineConfig::DEFAULT_PATH) : engineConfig::ParsingResult{};
if (result.parsedConfig == nullptr) {
ALOGW("%s: No configuration found, using default matching phone experience.", __FUNCTION__);
engineConfig::Config config = gDefaultEngineConfig;
diff --git a/services/audiopolicy/service/Android.mk b/services/audiopolicy/service/Android.mk
index 7015b7b..7be10c4 100644
--- a/services/audiopolicy/service/Android.mk
+++ b/services/audiopolicy/service/Android.mk
@@ -19,6 +19,7 @@
libaudiopolicymanager_interface_headers
LOCAL_SHARED_LIBRARIES := \
+ libactivitymanager_aidl \
libcutils \
libutils \
liblog \
@@ -36,6 +37,7 @@
capture_state_listener-aidl-cpp
LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := \
+ libactivitymanager_aidl \
libsensorprivacy
LOCAL_STATIC_LIBRARIES := \
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 11df5f3..46698b3 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -113,6 +113,7 @@
"libutilscallstack",
"libutils",
"libbinder",
+ "libactivitymanager_aidl",
"libcutils",
"libmedia",
"libmediautils",
@@ -150,11 +151,13 @@
],
static_libs: [
+ "libprocessinfoservice_aidl",
"libbinderthreadstateutils",
],
export_shared_lib_headers: [
"libbinder",
+ "libactivitymanager_aidl",
"libcamera_client",
"libfmq",
"libsensorprivacy",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3d9998a..91dda92 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -41,7 +41,6 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/PermissionController.h>
-#include <binder/ProcessInfoService.h>
#include <binder/IResultReceiver.h>
#include <binderthreadstate/CallerUtils.h>
#include <cutils/atomic.h>
@@ -57,6 +56,7 @@
#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <mediautils/BatteryNotifier.h>
+#include <processinfo/ProcessInfoService.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/String16.h>
@@ -252,10 +252,16 @@
proxyBinder->pingForUserUpdate();
}
-void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status) {
+void CameraService::broadcastTorchModeStatus(const String8& cameraId, TorchModeStatus status,
+ SystemCameraKind systemCameraKind) {
Mutex::Autolock lock(mStatusListenerLock);
-
for (auto& i : mListenerList) {
+ if (shouldSkipStatusUpdates(systemCameraKind, i->isVendorListener(), i->getListenerPid(),
+ i->getListenerUid())) {
+ ALOGV("Skipping torch callback for system-only camera device %s",
+ cameraId.c_str());
+ continue;
+ }
i->getListener()->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
}
}
@@ -341,7 +347,7 @@
Mutex::Autolock al(mTorchStatusMutex);
mTorchStatusMap.add(id, TorchModeStatus::AVAILABLE_OFF);
- broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF);
+ broadcastTorchModeStatus(id, TorchModeStatus::AVAILABLE_OFF, deviceKind);
}
updateCameraNumAndIds();
@@ -502,12 +508,19 @@
void CameraService::onTorchStatusChanged(const String8& cameraId,
TorchModeStatus newStatus) {
+ SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC;
+ status_t res = getSystemCameraKind(cameraId, &systemCameraKind);
+ if (res != OK) {
+ ALOGE("%s: Could not get system camera kind for camera id %s", __FUNCTION__,
+ cameraId.string());
+ return;
+ }
Mutex::Autolock al(mTorchStatusMutex);
- onTorchStatusChangedLocked(cameraId, newStatus);
+ onTorchStatusChangedLocked(cameraId, newStatus, systemCameraKind);
}
void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
- TorchModeStatus newStatus) {
+ TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
__FUNCTION__, cameraId.string(), newStatus);
@@ -556,8 +569,7 @@
}
}
}
-
- broadcastTorchModeStatus(cameraId, newStatus);
+ broadcastTorchModeStatus(cameraId, newStatus, systemCameraKind);
}
static bool hasPermissionsForSystemCamera(int callingPid, int callingUid) {
@@ -1864,6 +1876,10 @@
String8 id = String8(cameraId.string());
int uid = CameraThreadState::getCallingUid();
+ if (shouldRejectSystemCameraConnection(id)) {
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to set torch mode"
+ " for system only device %s: ", id.string());
+ }
// verify id is valid.
auto state = getCameraState(id);
if (state == nullptr) {
@@ -2220,6 +2236,11 @@
return shouldSkipStatusUpdates(deviceKind, isVendorListener, clientPid,
clientUid);}), cameraStatuses->end());
+ //cameraStatuses will have non-eligible camera ids removed.
+ std::set<String16> idsChosenForCallback;
+ for (const auto &s : *cameraStatuses) {
+ idsChosenForCallback.insert(String16(s.cameraId));
+ }
/*
* Immediately signal current torch status to this listener only
@@ -2229,7 +2250,11 @@
Mutex::Autolock al(mTorchStatusMutex);
for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) {
String16 id = String16(mTorchStatusMap.keyAt(i).string());
- listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+ // The camera id is visible to the client. Fine to send torch
+ // callback.
+ if (idsChosenForCallback.find(id) != idsChosenForCallback.end()) {
+ listener->onTorchStatusChanged(mapToInterface(mTorchStatusMap.valueAt(i)), id);
+ }
}
}
@@ -3766,7 +3791,7 @@
TorchModeStatus::AVAILABLE_OFF :
TorchModeStatus::NOT_AVAILABLE;
if (torchStatus != newTorchStatus) {
- onTorchStatusChangedLocked(cameraId, newTorchStatus);
+ onTorchStatusChangedLocked(cameraId, newTorchStatus, deviceKind);
}
}
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 6771718..2853b0c 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -996,7 +996,8 @@
// handle torch mode status change and invoke callbacks. mTorchStatusMutex
// should be locked.
void onTorchStatusChangedLocked(const String8& cameraId,
- hardware::camera::common::V1_0::TorchModeStatus newStatus);
+ hardware::camera::common::V1_0::TorchModeStatus newStatus,
+ SystemCameraKind systemCameraKind);
// get a camera's torch status. mTorchStatusMutex should be locked.
status_t getTorchStatusLocked(const String8 &cameraId,
@@ -1085,7 +1086,8 @@
static void pingCameraServiceProxy();
void broadcastTorchModeStatus(const String8& cameraId,
- hardware::camera::common::V1_0::TorchModeStatus status);
+ hardware::camera::common::V1_0::TorchModeStatus status,
+ SystemCameraKind systemCameraKind);
void disconnectClient(const String8& id, sp<BasicClient> clientToDisconnect);
diff --git a/services/mediametrics/Android.bp b/services/mediametrics/Android.bp
index b2a0cda..f13ca74 100644
--- a/services/mediametrics/Android.bp
+++ b/services/mediametrics/Android.bp
@@ -61,6 +61,7 @@
"-bugprone-unhandled-self-assignment", // found in TimeMachine.h
"-bugprone-suspicious-string-compare", // found in TimeMachine.h
"-cert-oop54-cpp", // found in TransactionLog.h
+ "-bugprone-narrowing-conversions", // b/182410845
]
cc_defaults {
diff --git a/services/mediaresourcemanager/Android.bp b/services/mediaresourcemanager/Android.bp
index e503885..e67720c 100644
--- a/services/mediaresourcemanager/Android.bp
+++ b/services/mediaresourcemanager/Android.bp
@@ -13,6 +13,9 @@
srcs: [
"ResourceManagerService.cpp",
"ServiceLog.cpp",
+
+ // TODO: convert to AIDL?
+ "IMediaResourceMonitor.cpp",
],
shared_libs: [
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.cpp b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
new file mode 100644
index 0000000..42d7feb
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.cpp
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "IMediaResourceMonitor.h"
+#include <binder/Parcel.h>
+#include <utils/Errors.h>
+#include <sys/types.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class BpMediaResourceMonitor : public BpInterface<IMediaResourceMonitor> {
+public:
+ explicit BpMediaResourceMonitor(const sp<IBinder>& impl)
+ : BpInterface<IMediaResourceMonitor>(impl) {}
+
+ virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaResourceMonitor::getInterfaceDescriptor());
+ data.writeInt32(pid);
+ data.writeInt32(type);
+ remote()->transact(NOTIFY_RESOURCE_GRANTED, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(MediaResourceMonitor, "android.media.IMediaResourceMonitor")
+
+// ----------------------------------------------------------------------
+
+// NOLINTNEXTLINE(google-default-arguments)
+status_t BnMediaResourceMonitor::onTransact( uint32_t code, const Parcel& data, Parcel* reply,
+ uint32_t flags) {
+ switch(code) {
+ case NOTIFY_RESOURCE_GRANTED: {
+ CHECK_INTERFACE(IMediaResourceMonitor, data, reply);
+ int32_t pid = data.readInt32();
+ const int32_t type = data.readInt32();
+ notifyResourceGranted(/*in*/ pid, /*in*/ type);
+ return NO_ERROR;
+ } break;
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------
+
+} // namespace android
diff --git a/services/mediaresourcemanager/IMediaResourceMonitor.h b/services/mediaresourcemanager/IMediaResourceMonitor.h
new file mode 100644
index 0000000..f92d557
--- /dev/null
+++ b/services/mediaresourcemanager/IMediaResourceMonitor.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#ifndef __ANDROID_VNDK__
+
+#include <binder/IInterface.h>
+
+namespace android {
+
+// ----------------------------------------------------------------------
+
+class IMediaResourceMonitor : public IInterface {
+public:
+ DECLARE_META_INTERFACE(MediaResourceMonitor)
+
+ // Values should be in sync with Intent.EXTRA_MEDIA_RESOURCE_TYPE_XXX.
+ enum {
+ TYPE_VIDEO_CODEC = 0,
+ TYPE_AUDIO_CODEC = 1,
+ };
+
+ virtual void notifyResourceGranted(/*in*/ int32_t pid, /*in*/ const int32_t type) = 0;
+
+ enum {
+ NOTIFY_RESOURCE_GRANTED = IBinder::FIRST_CALL_TRANSACTION,
+ };
+};
+
+// ----------------------------------------------------------------------
+
+class BnMediaResourceMonitor : public BnInterface<IMediaResourceMonitor> {
+public:
+ // NOLINTNEXTLINE(google-default-arguments)
+ virtual status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply,
+ uint32_t flags = 0);
+};
+
+// ----------------------------------------------------------------------
+
+} // namespace android
+
+#else // __ANDROID_VNDK__
+#error "This header is not visible to vendors"
+#endif // __ANDROID_VNDK__
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index db06a36..1695228 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,7 +21,7 @@
#include <android/binder_manager.h>
#include <android/binder_process.h>
-#include <binder/IMediaResourceMonitor.h>
+#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <cutils/sched_policy.h>
#include <dirent.h>
@@ -35,6 +35,7 @@
#include <sys/time.h>
#include <unistd.h>
+#include "IMediaResourceMonitor.h"
#include "ResourceManagerService.h"
#include "ServiceLog.h"
diff --git a/services/mediatranscoding/tests/Android.bp b/services/mediatranscoding/tests/Android.bp
index 86e047e..a856c05 100644
--- a/services/mediatranscoding/tests/Android.bp
+++ b/services/mediatranscoding/tests/Android.bp
@@ -25,6 +25,7 @@
],
shared_libs: [
+ "libactivitymanager_aidl",
"libbinder",
"libbinder_ndk",
"liblog",