Merge "libmediandk: replace libnativehelper to libnativehelper_lazy"
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 56dd26b..a374dfa 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -906,7 +906,6 @@
work->result = C2_CORRUPTED;
return;
}
- continue;
}
if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
if (mHeaderDecoded == false) {
diff --git a/media/libeffects/lvm/benchmarks/Android.bp b/media/libeffects/lvm/benchmarks/Android.bp
index 60a9772..8a25b85 100644
--- a/media/libeffects/lvm/benchmarks/Android.bp
+++ b/media/libeffects/lvm/benchmarks/Android.bp
@@ -28,6 +28,7 @@
cc_benchmark {
name: "reverb_benchmark",
vendor: true,
+ host_supported: true,
include_dirs: [
"frameworks/av/media/libeffects/lvm/wrapper/Reverb",
],
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index ed7ef7f..a044295 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -140,6 +140,7 @@
},
vendor: true,
+ host_supported: true,
srcs: [
"Reverb/src/LVREV_ApplyNewSettings.cpp",
"Reverb/src/LVREV_ClearAudioBuffers.cpp",
diff --git a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
index 979644c..761c6ce 100644
--- a/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
+++ b/media/libeffects/lvm/lib/Bass/src/LVDBE_Init.cpp
@@ -57,10 +57,7 @@
* Create the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = calloc(1, sizeof(*pInstance));
- }
- if (*phInstance == LVM_NULL) {
- return LVDBE_NULLADDRESS;
+ *phInstance = new LVDBE_Instance_t;
}
pInstance = (LVDBE_Instance_t*)*phInstance;
@@ -185,6 +182,6 @@
free(pInstance->pData);
pInstance->pData = LVM_NULL;
}
- free(pInstance);
+ delete pInstance;
*phInstance = LVM_NULL;
}
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
index c1b375e..b092970 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Init.cpp
@@ -93,10 +93,7 @@
/*
* Create the instance handle
*/
- *phInstance = (LVM_Handle_t)calloc(1, sizeof(*pInstance));
- if (*phInstance == LVM_NULL) {
- return LVM_NULLADDRESS;
- }
+ *phInstance = new LVM_Instance_t;
pInstance = (LVM_Instance_t*)*phInstance;
pInstance->InstParams = *pInstParams;
@@ -543,7 +540,7 @@
pInstance->pPSAInput = LVM_NULL;
}
- free(*phInstance);
+ delete pInstance;
return;
}
diff --git a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
index b95d076..5cdcf35 100644
--- a/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
+++ b/media/libeffects/lvm/lib/Common/lib/LVM_Types.h
@@ -121,6 +121,19 @@
LVM_FS_DUMMY = LVM_MAXENUM
} LVM_Fs_en;
+static inline LVM_Fs_en lvmFsForSampleRate(int sampleRate) {
+ static const std::map<int, LVM_Fs_en> kLVMFsMap = {
+ {8000, LVM_FS_8000}, {11025, LVM_FS_11025}, {12000, LVM_FS_12000},
+ {16000, LVM_FS_16000}, {22050, LVM_FS_22050}, {24000, LVM_FS_24000},
+ {32000, LVM_FS_32000}, {44100, LVM_FS_44100}, {48000, LVM_FS_48000},
+ {88200, LVM_FS_88200}, {96000, LVM_FS_96000}, {176400, LVM_FS_176400},
+ {192000, LVM_FS_192000}};
+    const auto it = kLVMFsMap.find(sampleRate);
+    return it != kLVMFsMap.end() ? it->second : LVM_FS_INVALID;
+}
+
/* Memory Types */
typedef enum {
LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA,
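For reference, a minimal caller-side sketch of the new helper above (the wrapper name validateLvmRate is hypothetical and not part of the patch; the std::map usage assumes <map> is pulled in elsewhere in this header). The hunks in lvmtest.cpp, EffectBundle.cpp, and EffectReverb.cpp below use the helper the same way.

static inline bool validateLvmRate(int sampleRate, LVM_Fs_en* outFs) {
    // Map the rate; LVM_FS_INVALID marks an unsupported rate, which the
    // callers below translate into -EINVAL.
    *outFs = lvmFsForSampleRate(sampleRate);
    return *outFs != LVM_FS_INVALID;
}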
diff --git a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
index 833ee5d..37e6d4d 100644
--- a/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
+++ b/media/libeffects/lvm/lib/Eq/src/LVEQNB_Init.cpp
@@ -52,10 +52,7 @@
LVEQNB_Capabilities_t* pCapabilities, void* pScratch) {
LVEQNB_Instance_t* pInstance;
- *phInstance = calloc(1, sizeof(*pInstance));
- if (phInstance == LVM_NULL) {
- return LVEQNB_NULLADDRESS;
- }
+ *phInstance = new LVEQNB_Instance_t;
pInstance = (LVEQNB_Instance_t*)*phInstance;
pInstance->Capabilities = *pCapabilities;
@@ -146,6 +143,6 @@
free(pInstance->pBiquadType);
pInstance->pBiquadType = LVM_NULL;
}
- free(pInstance);
+ delete pInstance;
*phInstance = LVM_NULL;
}
diff --git a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
index 484787a..489bc6f 100644
--- a/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
+++ b/media/libeffects/lvm/lib/Reverb/lib/LVREV.h
@@ -191,6 +191,23 @@
/****************************************************************************************/
/* */
+/* FUNCTION: LVREV_FreeInstance */
+/* */
+/* DESCRIPTION: */
+/* This function is used to free the internal allocations of the module. */
+/* */
+/* PARAMETERS: */
+/* hInstance Instance handle */
+/* */
+/* RETURNS: */
+/* LVREV_SUCCESS free instance succeeded */
+/* LVREV_NULLADDRESS Instance is NULL */
+/* */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance);
+
+/****************************************************************************************/
+/* */
/* FUNCTION: LVXX_GetControlParameters */
/* */
/* DESCRIPTION: */
diff --git a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
index 9a797bd..bf71634 100644
--- a/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
+++ b/media/libeffects/lvm/lib/Reverb/src/LVREV_GetInstanceHandle.cpp
@@ -114,7 +114,7 @@
* Set the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = InstAlloc_AddMember(&SlowData, sizeof(LVREV_Instance_st));
+ *phInstance = new LVREV_Instance_st;
}
pLVREV_Private = (LVREV_Instance_st*)*phInstance;
pLVREV_Private->MemoryTable = *pMemoryTable;
@@ -269,4 +269,27 @@
return LVREV_SUCCESS;
}
+/****************************************************************************************/
+/* */
+/* FUNCTION: LVREV_FreeInstance */
+/* */
+/* DESCRIPTION: */
+/* This function is used to free the internal allocations of the module. */
+/* */
+/* PARAMETERS: */
+/* hInstance Instance handle */
+/* */
+/* RETURNS: */
+/* LVREV_SUCCESS free instance succeeded */
+/* LVREV_NULLADDRESS Instance is NULL */
+/* */
+/****************************************************************************************/
+LVREV_ReturnStatus_en LVREV_FreeInstance(LVREV_Handle_t hInstance) {
+ if (hInstance == LVM_NULL) {
+ return LVREV_NULLADDRESS;
+ }
+
+ delete (LVREV_Instance_st*)hInstance;
+ return LVREV_SUCCESS;
+}
/* End of file */
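For context, a minimal teardown sketch using only the API declared above; releaseReverbInstance is a hypothetical helper, and hInstance is assumed to have been obtained earlier from LVREV_GetInstanceHandle(). Reverb_free() in the wrapper below makes the equivalent call.

static void releaseReverbInstance(LVREV_Handle_t hInstance) {
    LVREV_ReturnStatus_en status = LVREV_FreeInstance(hInstance);
    if (status != LVREV_SUCCESS) {
        // LVREV_NULLADDRESS is returned when hInstance is LVM_NULL.
    }
}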
diff --git a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
index 9874dcc..5ca8543 100644
--- a/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
+++ b/media/libeffects/lvm/lib/SpectrumAnalyzer/src/LVPSA_Init.cpp
@@ -49,10 +49,7 @@
LVM_UINT32 BufferLength = 0;
/* Set the instance handle if not already initialised */
- *phInstance = calloc(1, sizeof(*pLVPSA_Inst));
- if (*phInstance == LVM_NULL) {
- return LVPSA_ERROR_NULLADDRESS;
- }
+ *phInstance = new LVPSA_InstancePr_t;
pLVPSA_Inst = (LVPSA_InstancePr_t*)*phInstance;
pLVPSA_Inst->pScratch = pScratch;
@@ -191,6 +188,6 @@
free(pLVPSA_Inst->pQPD_Taps);
pLVPSA_Inst->pQPD_Taps = LVM_NULL;
}
- free(pLVPSA_Inst);
+ delete pLVPSA_Inst;
*phInstance = LVM_NULL;
}
diff --git a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
index ba3202f..d60b360 100644
--- a/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
+++ b/media/libeffects/lvm/lib/StereoWidening/src/LVCS_Init.cpp
@@ -55,10 +55,7 @@
* Create the instance handle if not already initialised
*/
if (*phInstance == LVM_NULL) {
- *phInstance = calloc(1, sizeof(*pInstance));
- }
- if (*phInstance == LVM_NULL) {
- return LVCS_NULLADDRESS;
+ *phInstance = new LVCS_Instance_t;
}
pInstance = (LVCS_Instance_t*)*phInstance;
@@ -123,7 +120,7 @@
if (pInstance == LVM_NULL) {
return;
}
- free(pInstance);
+ delete pInstance;
*phInstance = LVM_NULL;
return;
}
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 8627c13..f5ff597 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -54,8 +54,9 @@
cc_test {
name: "reverb_test",
- host_supported: false,
+ host_supported: true,
proprietary: true,
+ gtest: false,
include_dirs: [
"frameworks/av/media/libeffects/lvm/wrapper/Reverb",
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index f107b18..b044e16 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -415,52 +415,11 @@
} else {
return -EINVAL;
}
-
- LVM_Fs_en sampleRate;
- switch (plvmConfigParams->samplingFreq) {
- case 8000:
- sampleRate = LVM_FS_8000;
- break;
- case 11025:
- sampleRate = LVM_FS_11025;
- break;
- case 12000:
- sampleRate = LVM_FS_12000;
- break;
- case 16000:
- sampleRate = LVM_FS_16000;
- break;
- case 22050:
- sampleRate = LVM_FS_22050;
- break;
- case 24000:
- sampleRate = LVM_FS_24000;
- break;
- case 32000:
- sampleRate = LVM_FS_32000;
- break;
- case 44100:
- sampleRate = LVM_FS_44100;
- break;
- case 48000:
- sampleRate = LVM_FS_48000;
- break;
- case 88200:
- sampleRate = LVM_FS_88200;
- break;
- case 96000:
- sampleRate = LVM_FS_96000;
- break;
- case 176400:
- sampleRate = LVM_FS_176400;
- break;
- case 192000:
- sampleRate = LVM_FS_192000;
- break;
- default:
- return -EINVAL;
+ params->SampleRate = lvmFsForSampleRate(plvmConfigParams->samplingFreq);
+ if (params->SampleRate == LVM_FS_INVALID) {
+ ALOGE("lvmControl invalid sampling rate %d", plvmConfigParams->samplingFreq);
+ return -EINVAL;
}
- params->SampleRate = sampleRate;
/* Concert Sound parameters */
params->VirtualizerOperatingMode = plvmConfigParams->csEnable;
diff --git a/media/libeffects/lvm/wrapper/Android.bp b/media/libeffects/lvm/wrapper/Android.bp
index 09c4aef..e169e3c 100644
--- a/media/libeffects/lvm/wrapper/Android.bp
+++ b/media/libeffects/lvm/wrapper/Android.bp
@@ -67,6 +67,7 @@
},
vendor: true,
+ host_supported: true,
srcs: ["Reverb/EffectReverb.cpp"],
cppflags: [
@@ -83,7 +84,6 @@
shared_libs: [
"libaudioutils",
"libcutils",
- "libdl",
"liblog",
],
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 46dbf7e..9ccccb4 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -957,51 +957,12 @@
pContext->config = *pConfig;
const LVM_INT16 NrChannels = audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
- switch (pConfig->inputCfg.samplingRate) {
- case 8000:
- SampleRate = LVM_FS_8000;
- pContext->pBundledContext->SamplesPerSecond = 8000 * NrChannels;
- break;
- case 16000:
- SampleRate = LVM_FS_16000;
- pContext->pBundledContext->SamplesPerSecond = 16000 * NrChannels;
- break;
- case 22050:
- SampleRate = LVM_FS_22050;
- pContext->pBundledContext->SamplesPerSecond = 22050 * NrChannels;
- break;
- case 32000:
- SampleRate = LVM_FS_32000;
- pContext->pBundledContext->SamplesPerSecond = 32000 * NrChannels;
- break;
- case 44100:
- SampleRate = LVM_FS_44100;
- pContext->pBundledContext->SamplesPerSecond = 44100 * NrChannels;
- break;
- case 48000:
- SampleRate = LVM_FS_48000;
- pContext->pBundledContext->SamplesPerSecond = 48000 * NrChannels;
- break;
- case 88200:
- SampleRate = LVM_FS_88200;
- pContext->pBundledContext->SamplesPerSecond = 88200 * NrChannels;
- break;
- case 96000:
- SampleRate = LVM_FS_96000;
- pContext->pBundledContext->SamplesPerSecond = 96000 * NrChannels;
- break;
- case 176400:
- SampleRate = LVM_FS_176400;
- pContext->pBundledContext->SamplesPerSecond = 176400 * NrChannels;
- break;
- case 192000:
- SampleRate = LVM_FS_192000;
- pContext->pBundledContext->SamplesPerSecond = 192000 * NrChannels;
- break;
- default:
- ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
- return -EINVAL;
+ SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+ if (SampleRate == LVM_FS_INVALID) {
+ ALOGV("Effect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+ return -EINVAL;
}
+ pContext->pBundledContext->SamplesPerSecond = pConfig->inputCfg.samplingRate * NrChannels;
if (pContext->pBundledContext->SampleRate != SampleRate ||
pContext->pBundledContext->ChMask != pConfig->inputCfg.channels) {
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index 9ea70ce..4489e81 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -190,8 +190,8 @@
/* Effect Library Interface Implementation */
-extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t sessionId __unused,
- int32_t ioId __unused, effect_handle_t* pHandle) {
+extern "C" int EffectCreate(const effect_uuid_t* uuid, int32_t /* sessionId __unused */,
+ int32_t /* ioId __unused */, effect_handle_t* pHandle) {
int ret;
int i;
int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t*);
@@ -517,6 +517,9 @@
}
}
}
+
+ LvmStatus = LVREV_FreeInstance(pContext->hInstance);
+ LVM_ERROR_CHECK(LvmStatus, "LVREV_FreeInstance", "Reverb_free")
} /* end Reverb_free */
//----------------------------------------------------------------------------
@@ -553,40 +556,10 @@
// ALOGV("\tReverb_setConfig calling memcpy");
pContext->config = *pConfig;
- switch (pConfig->inputCfg.samplingRate) {
- case 8000:
- SampleRate = LVM_FS_8000;
- break;
- case 16000:
- SampleRate = LVM_FS_16000;
- break;
- case 22050:
- SampleRate = LVM_FS_22050;
- break;
- case 32000:
- SampleRate = LVM_FS_32000;
- break;
- case 44100:
- SampleRate = LVM_FS_44100;
- break;
- case 48000:
- SampleRate = LVM_FS_48000;
- break;
- case 88200:
- SampleRate = LVM_FS_88200;
- break;
- case 96000:
- SampleRate = LVM_FS_96000;
- break;
- case 176400:
- SampleRate = LVM_FS_176400;
- break;
- case 192000:
- SampleRate = LVM_FS_192000;
- break;
- default:
- ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
- return -EINVAL;
+ SampleRate = lvmFsForSampleRate(pConfig->inputCfg.samplingRate);
+ if (SampleRate == LVM_FS_INVALID) {
+ ALOGE("Reverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate);
+ return -EINVAL;
}
if (pContext->SampleRate != SampleRate) {
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index ac86f72..154988d 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -66,6 +66,7 @@
ENABLE_AUDIO_DEVICE_CALLBACK,
GET_ACTIVE_MICROPHONES,
GET_PORT_ID,
+ GET_RTP_DATA_USAGE,
SET_PREFERRED_MICROPHONE_DIRECTION,
SET_PREFERRED_MICROPHONE_FIELD_DIMENSION,
SET_PRIVACY_SENSITIVE,
@@ -476,6 +477,23 @@
*portId = (audio_port_handle_t)reply.readInt32();
return NO_ERROR;
}
+
+ status_t getRtpDataUsage(uint64_t *bytes)
+ {
+ ALOGV("getRtpDataUsage");
+ if (bytes == nullptr) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ status_t status = remote()->transact(GET_RTP_DATA_USAGE, data, &reply);
+ if (status != OK
+ || (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ *bytes = 0;
+ return status;
+ }
+ return reply.readUint64(bytes);
+ }
};
IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
@@ -759,6 +777,17 @@
}
return NO_ERROR;
}
+ case GET_RTP_DATA_USAGE: {
+ ALOGV("GET_RTP_DATA_USAGE");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+ uint64_t bytes;
+ status_t status = getRtpDataUsage(&bytes);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeUint64(bytes);
+ }
+ return NO_ERROR;
+ }
case SET_PREFERRED_MICROPHONE_DIRECTION: {
ALOGV("SET_PREFERRED_MICROPHONE_DIRECTION");
CHECK_INTERFACE(IMediaRecorder, data, reply);
diff --git a/media/libmedia/include/media/IMediaRecorder.h b/media/libmedia/include/media/IMediaRecorder.h
index 651bd5e..6e69782 100644
--- a/media/libmedia/include/media/IMediaRecorder.h
+++ b/media/libmedia/include/media/IMediaRecorder.h
@@ -78,6 +78,7 @@
virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
virtual status_t getPortId(audio_port_handle_t *portId) = 0;
+ virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/media/libmedia/include/media/MediaRecorderBase.h b/media/libmedia/include/media/MediaRecorderBase.h
index 8493f64..d9a7efb 100644
--- a/media/libmedia/include/media/MediaRecorderBase.h
+++ b/media/libmedia/include/media/MediaRecorderBase.h
@@ -77,6 +77,7 @@
virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction) = 0;
virtual status_t setPreferredMicrophoneFieldDimension(float zoom) = 0;
virtual status_t getPortId(audio_port_handle_t *portId) const = 0;
+ virtual status_t getRtpDataUsage(uint64_t *bytes) = 0;
diff --git a/media/libmedia/include/media/mediarecorder.h b/media/libmedia/include/media/mediarecorder.h
index fbcdb28..84c92f6 100644
--- a/media/libmedia/include/media/mediarecorder.h
+++ b/media/libmedia/include/media/mediarecorder.h
@@ -270,6 +270,7 @@
status_t setPreferredMicrophoneFieldDimension(float zoom);
status_t getPortId(audio_port_handle_t *portId) const;
+ status_t getRtpDataUsage(uint64_t *bytes);
private:
void doCleanUp();
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index d9d1f25..e3cd9d8 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -913,4 +913,14 @@
return mMediaRecorder->getPortId(portId);
}
+status_t MediaRecorder::getRtpDataUsage(uint64_t *bytes)
+{
+ ALOGV("getRtpDataUsage");
+
+ if (mMediaRecorder == NULL) {
+ ALOGE("media recorder is not initialized yet");
+ return INVALID_OPERATION;
+ }
+ return mMediaRecorder->getRtpDataUsage(bytes);
+}
} // namespace android
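A caller-side sketch of the new query, assuming a MediaRecorder that has already been configured for RTP output; logRtpDataUsage is a hypothetical helper and not part of the patch.

static status_t logRtpDataUsage(const sp<MediaRecorder>& recorder) {
    uint64_t bytes = 0;
    status_t err = recorder->getRtpDataUsage(&bytes);
    if (err == NO_ERROR) {
        // Accumulated RTP payload plus IP/UDP header bytes, per ARTPWriter below.
        ALOGD("RTP data usage so far: %llu bytes", (unsigned long long)bytes);
    }
    return err;
}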
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 9b1974b..57856fb 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -585,4 +585,13 @@
}
return NO_INIT;
}
+
+status_t MediaRecorderClient::getRtpDataUsage(uint64_t *bytes) {
+ ALOGV("getRtpDataUsage");
+ Mutex::Autolock lock(mLock);
+ if (mRecorder != NULL) {
+ return mRecorder->getRtpDataUsage(bytes);
+ }
+ return NO_INIT;
+}
}; // namespace android
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 12257e5..e041855 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -86,6 +86,7 @@
virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
status_t getPortId(audio_port_handle_t *portId) override;
+ virtual status_t getRtpDataUsage(uint64_t *bytes);
private:
friend class MediaPlayerService; // for accessing private constructor
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index b2f6407..ecbdf61 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2568,6 +2568,14 @@
return NO_INIT;
}
+status_t StagefrightRecorder::getRtpDataUsage(uint64_t *bytes) {
+ if (mWriter != 0) {
+ *bytes = mWriter->getAccumulativeBytes();
+ return OK;
+ }
+ return NO_INIT;
+}
+
status_t StagefrightRecorder::dump(
int fd, const Vector<String16>& args) const {
ALOGV("dump");
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 0362edd..4bba869 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -82,6 +82,7 @@
virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
status_t getPortId(audio_port_handle_t *portId) const override;
+ virtual status_t getRtpDataUsage(uint64_t *bytes);
private:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 47362ef..389249e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2858,23 +2858,24 @@
in.writeInt32(payloadType);
switch (payloadType) {
- case NuPlayer::RTPSource::RTCP_TSFB: // RTCP TSFB
- case NuPlayer::RTPSource::RTCP_PSFB: // RTCP PSFB
- case NuPlayer::RTPSource::RTP_AUTODOWN:
+ case ARTPSource::RTCP_TSFB: // RTCP TSFB
+ case ARTPSource::RTCP_PSFB: // RTCP PSFB
+ case ARTPSource::RTP_AUTODOWN:
{
int32_t feedbackType, id;
CHECK(msg->findInt32("feedback-type", &feedbackType));
CHECK(msg->findInt32("sender", &id));
in.writeInt32(feedbackType);
in.writeInt32(id);
- if (payloadType == NuPlayer::RTPSource::RTCP_TSFB) {
+ if (payloadType == ARTPSource::RTCP_TSFB) {
int32_t bitrate;
CHECK(msg->findInt32("bit-rate", &bitrate));
in.writeInt32(bitrate);
}
break;
}
- case NuPlayer::RTPSource::RTP_QUALITY:
+ case ARTPSource::RTP_QUALITY:
+ case ARTPSource::RTP_QUALITY_EMC:
{
int32_t feedbackType, bitrate;
int32_t highestSeqNum, baseSeqNum, prevExpected;
@@ -2895,7 +2896,7 @@
in.writeInt32(prevNumBufRecv);
break;
}
- case NuPlayer::RTPSource::RTP_CVO:
+ case ARTPSource::RTP_CVO:
{
int32_t cvo;
CHECK(msg->findInt32("cvo", &cvo));
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index b1901e8..b43df38 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -45,8 +45,18 @@
mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0),
- mLastCVOUpdated(-1) {
- ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+ mFirstAccessUnit(true),
+ mAllTracksHaveTime(false),
+ mNTPAnchorUs(-1),
+ mMediaAnchorUs(-1),
+ mLastMediaTimeUs(-1),
+ mNumAccessUnitsReceived(0),
+ mLastCVOUpdated(-1),
+ mReceivedFirstRTCPPacket(false),
+ mReceivedFirstRTPPacket(false),
+ mPausing(false),
+ mPauseGeneration(0) {
+ ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
}
NuPlayer::RTPSource::~RTPSource() {
@@ -289,7 +299,7 @@
if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
cvo != mLastCVOUpdated) {
sp<AMessage> msg = new AMessage();
- msg->setInt32("payload-type", NuPlayer::RTPSource::RTP_CVO);
+ msg->setInt32("payload-type", ARTPSource::RTP_CVO);
msg->setInt32("cvo", cvo);
sp<AMessage> notify = dupNotify();
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
index fb2d3b9..3b4f9e9 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -33,6 +33,7 @@
#include "AnotherPacketSource.h"
#include "APacketSource.h"
#include "ARTPConnection.h"
+#include "ARTPSource.h"
#include "ASessionDescription.h"
#include "NuPlayerSource.h"
@@ -51,16 +52,6 @@
const sp<AMessage> ¬ify,
const String8& rtpParams);
- enum {
- RTP_FIRST_PACKET = 100,
- RTCP_FIRST_PACKET = 101,
- RTP_QUALITY = 102,
- RTCP_TSFB = 205,
- RTCP_PSFB = 206,
- RTP_CVO = 300,
- RTP_AUTODOWN = 400,
- };
-
virtual status_t getBufferingSettings(
BufferingSettings* buffering /* nonnull */) override;
virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 8f1da0d..71a4ad8 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -6860,6 +6860,7 @@
ALOGV("onAllocateComponent");
CHECK(mCodec->mOMXNode == NULL);
+ mCodec->mFatalError = false;
sp<AMessage> notify = new AMessage(kWhatOMXMessageList, mCodec);
notify->setInt32("generation", mCodec->mNodeGeneration + 1);
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 17b1abf..9f20185 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -58,6 +58,7 @@
virtual void updatePayloadType(int32_t /*payloadType*/) {}
virtual void updateSocketNetwork(int64_t /*socketNetwork*/) {}
virtual uint32_t getSequenceNum() { return 0; }
+ virtual uint64_t getAccumulativeBytes() { return 0; }
protected:
virtual ~MediaWriter() {}
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 72a377d..2f93d5d 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -112,24 +112,25 @@
ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
const sp<ARTPSource> &source) {
List<sp<ABuffer> > *queue = source->queue();
+ const uint32_t firstRTPTime = source->mFirstRtpTime;
if (queue->empty()) {
return NOT_ENOUGH_DATA;
}
sp<ABuffer> buffer = *queue->begin();
- uint32_t rtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
buffer->meta()->setObject("source", source);
+ int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+
int64_t startTime = source->mFirstSysTime / 1000;
int64_t nowTime = ALooper::GetNowUs() / 1000;
int64_t playedTime = nowTime - startTime;
- int64_t playedTimeRtp =
- source->mFirstRtpTime + (((uint32_t)playedTime) * (source->mClockRate / 1000));
- const uint32_t jitterTime =
- (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
- uint32_t expiredTimeInJb = rtpTime + jitterTime;
+
+ int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
+ const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+
+ int64_t expiredTimeInJb = rtpTime + jitterTime;
bool isExpired = expiredTimeInJb <= (playedTimeRtp);
bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
@@ -154,11 +155,11 @@
if (isTooLate300) {
ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
- ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+ (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
printNowTimeUs(startTime, nowTime, playedTime);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+ mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
}
if (mNextExpectedSeqNoValid) {
@@ -564,14 +565,25 @@
msg->post();
}
-int32_t AAVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+inline int64_t AAVCAssembler::findRTPTime(
+ const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+    /* Declare rtpTime as int64_t before doing any arithmetic (+, -, *) on it:
+       the value can be near UINT32_MAX, so 32-bit arithmetic may overflow. */
+ int64_t rtpTime = 0;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+ int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+ return rtpTime | overflowMask;
+}
+
+int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+ uint32_t first, int64_t play, int64_t jit) {
sp<ABuffer> buffer = *(queue->begin());
- uint32_t rtpTime;
int32_t nextSeqNo = buffer->int32Data();
Queue::const_iterator it = queue->begin();
while (it != queue->end()) {
- CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ int64_t rtpTime = findRTPTime(first, *it);
// if pkt in time exists, that should be the next pivot
if (rtpTime + jit >= play) {
nextSeqNo = (*it)->int32Data();
@@ -613,9 +625,9 @@
(long long)start, (long long)now, (long long)play);
}
-inline void AAVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
- ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
- rtp, (long long)play, exp, isExp);
+inline void AAVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+ ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+ (long long)rtp, (long long)play, (long long)exp, isExp);
}
ARTPAssembler::AssemblyStatus AAVCAssembler::assembleMore(
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 79fc7c2..9d71e2f 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -63,12 +63,13 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *q, uint32_t jit, int64_t play);
+ inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+ int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
size_t avail, float goodRatio);
int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
void printNowTimeUs(int64_t start, int64_t now, int64_t play);
- void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+ void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
};
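For context, a standalone sketch of the 32-bit RTP timestamp unwrapping that findRTPTime() performs in the hunks above (and again in AHEVCAssembler below); the mask logic is copied from the patch and the sample values are illustrative only.

static int64_t unwrapRtpTime(uint32_t firstRTPTime, uint32_t rtpTime32) {
    int64_t rtpTime = rtpTime32;
    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped.
    int64_t overflowMask = ((int64_t)firstRTPTime & 0x80000000 & ~rtpTime) << 1;
    return rtpTime | overflowMask;  // adds 2^32 only when a wrap occurred
}
// e.g. firstRTPTime = 0xFFFF0000, rtpTime32 = 0x00001000
//      -> 0x100001000, which correctly compares as later than firstRTPTime.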
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index 148a0ba..553ea08 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -122,6 +122,7 @@
ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
const sp<ARTPSource> &source) {
List<sp<ABuffer> > *queue = source->queue();
+ const uint32_t firstRTPTime = source->mFirstRtpTime;
if (queue->empty()) {
return NOT_ENOUGH_DATA;
@@ -129,15 +130,15 @@
sp<ABuffer> buffer = *queue->begin();
buffer->meta()->setObject("source", source);
- uint32_t rtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ int64_t rtpTime = findRTPTime(firstRTPTime, buffer);
+
int64_t startTime = source->mFirstSysTime / 1000;
int64_t nowTime = ALooper::GetNowUs() / 1000;
int64_t playedTime = nowTime - startTime;
- int64_t playedTimeRtp = source->mFirstRtpTime +
- (((uint32_t)playedTime) * (source->mClockRate / 1000));
- const uint32_t jitterTime = (uint32_t)(source->mClockRate / ((float)1000 / (source->mJbTimeMs)));
- uint32_t expiredTimeInJb = rtpTime + jitterTime;
+ int64_t playedTimeRtp = source->mFirstRtpTime + playedTime * (int64_t)source->mClockRate / 1000;
+ const int64_t jitterTime = source->mJbTimeMs * (int64_t)source->mClockRate / 1000;
+
+ int64_t expiredTimeInJb = rtpTime + jitterTime;
bool isExpired = expiredTimeInJb <= (playedTimeRtp);
bool isTooLate200 = expiredTimeInJb < (playedTimeRtp - jitterTime);
bool isTooLate300 = expiredTimeInJb < (playedTimeRtp - (jitterTime * 3 / 2));
@@ -162,11 +163,11 @@
if (isTooLate300) {
ALOGW("buffer arrived after 300ms ... \t Diff in Jb=%lld \t Seq# %d",
- ((long long)playedTimeRtp) - expiredTimeInJb, buffer->int32Data());
+ (long long)(playedTimeRtp - expiredTimeInJb), buffer->int32Data());
printNowTimeUs(startTime, nowTime, playedTime);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, jitterTime, playedTimeRtp);
+ mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTime);
}
if (mNextExpectedSeqNoValid) {
@@ -577,14 +578,25 @@
msg->post();
}
-int32_t AHEVCAssembler::pickProperSeq(const Queue *queue, uint32_t jit, int64_t play) {
+inline int64_t AHEVCAssembler::findRTPTime(
+ const uint32_t& firstRTPTime, const sp<ABuffer>& buffer) {
+    /* Declare rtpTime as int64_t before doing any arithmetic (+, -, *) on it:
+       the value can be near UINT32_MAX, so 32-bit arithmetic may overflow. */
+ int64_t rtpTime = 0;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+    // If firstRTPTime is above 2^31 and rtpTime is below 2^31, rtpTime has wrapped around.
+ int64_t overflowMask = (firstRTPTime & 0x80000000 & ~rtpTime) << 1;
+ return rtpTime | overflowMask;
+}
+
+int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+ uint32_t first, int64_t play, int64_t jit) {
sp<ABuffer> buffer = *(queue->begin());
- uint32_t rtpTime;
int32_t nextSeqNo = buffer->int32Data();
Queue::const_iterator it = queue->begin();
while (it != queue->end()) {
- CHECK((*it)->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ int64_t rtpTime = findRTPTime(first, *it);
// if pkt in time exists, that should be the next pivot
if (rtpTime + jit >= play) {
nextSeqNo = (*it)->int32Data();
@@ -626,12 +638,11 @@
(long long)start, (long long)now, (long long)play);
}
-inline void AHEVCAssembler::printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp) {
- ALOGD("rtp-time(JB)=%u, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%u isExpired=%d",
- rtp, (long long)play, exp, isExp);
+inline void AHEVCAssembler::printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp) {
+ ALOGD("rtp-time(JB)=%lld, played-rtp-time(JB)=%lld, expired-rtp-time(JB)=%lld expired=%d",
+ (long long)rtp, (long long)play, (long long)exp, isExp);
}
-
ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addNALUnit(source);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index 16fc1c8..bf1cded 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -64,12 +64,13 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *queue, uint32_t jit, int64_t play);
- bool recycleUnit(uint32_t start, uint32_t end, uint32_t conneceted,
+ inline int64_t findRTPTime(const uint32_t& firstRTPTime, const sp<ABuffer>& buffer);
+ int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
+ bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
size_t avail, float goodRatio);
int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
void printNowTimeUs(int64_t start, int64_t now, int64_t play);
- void printRTPTime(uint32_t rtp, int64_t play, uint32_t exp, bool isExp);
+ void printRTPTime(int64_t rtp, int64_t play, int64_t exp, bool isExp);
DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 97a9bbb..61c06d1 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -464,6 +464,22 @@
ALOGD("Send FIR immediately for lost Packets");
send(&*it, buffer);
}
+
+ buffer->setRange(0, 0);
+ it->mSources.valueAt(i)->addTMMBR(buffer, mTargetBitrate);
+ mTargetBitrate = -1;
+ if (buffer->size() > 0) {
+ ALOGV("Sending TMMBR...");
+ ssize_t n = send(&*it, buffer);
+
+ if (n != (ssize_t)buffer->size()) {
+ ALOGW("failed to send RTCP TMMBR (%s).",
+ n >= 0 ? "connection gone" : strerror(errno));
+
+ it = mStreams.erase(it);
+ continue;
+ }
+ }
}
++it;
@@ -509,16 +525,14 @@
ssize_t n = send(s, buffer);
- if (n <= 0) {
+ if (n != (ssize_t)buffer->size()) {
ALOGW("failed to send RTCP receiver report (%s).",
- n == 0 ? "connection gone" : strerror(errno));
+ n >= 0 ? "connection gone" : strerror(errno));
it = mStreams.erase(it);
continue;
}
- CHECK_EQ(n, (ssize_t)buffer->size());
-
mLastReceiverReportTimeUs = nowUs;
}
@@ -862,6 +876,12 @@
sp<ARTPSource> source = findSource(s, id);
+    // Report final statistics to be used for RTP data usage.
+    int64_t nowUs = ALooper::GetNowUs();
+    int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+    int32_t bitrate = timeDiff > 0 ? mCumulativeBytes * 8 / timeDiff : 0;
+ source->notifyPktInfo(bitrate, true /* isRegular */);
+
source->byeReceived();
return OK;
@@ -1079,6 +1099,28 @@
mCumulativeBytes = 0;
mLastBitrateReportTimeUs = nowUs;
}
+ else if (mLastEarlyNotifyTimeUs + 100000ll <= nowUs) {
+        int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
+        int32_t bitrate = timeDiff > 0 ? mCumulativeBytes * 8 / timeDiff : 0;
+ mLastEarlyNotifyTimeUs = nowUs;
+
+ List<StreamInfo>::iterator it = mStreams.begin();
+ while (it != mStreams.end()) {
+ StreamInfo *s = &*it;
+ if (s->mIsInjected) {
+ ++it;
+ continue;
+ }
+ for (size_t i = 0; i < s->mSources.size(); ++i) {
+ sp<ARTPSource> source = s->mSources.valueAt(i);
+ if (source->isNeedToEarlyNotify()) {
+ source->notifyPktInfo(bitrate, false /* isRegular */);
+ mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
+ }
+ }
+ ++it;
+ }
+ }
else if (mLastBitrateReportTimeUs + 1000000ll <= nowUs) {
int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
@@ -1101,31 +1143,15 @@
}
buffer->setRange(0, 0);
-
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
- source->notifyPktInfo(bitrate, nowUs);
- source->addTMMBR(buffer, mTargetBitrate);
- }
- if (buffer->size() > 0) {
- ALOGV("Sending TMMBR...");
-
- ssize_t n = send(s, buffer);
-
- if (n <= 0) {
- ALOGW("failed to send RTCP TMMBR (%s).",
- n == 0 ? "connection gone" : strerror(errno));
-
- it = mStreams.erase(it);
- continue;
- }
-
- CHECK_EQ(n, (ssize_t)buffer->size());
+ source->notifyPktInfo(bitrate, true /* isRegular */);
}
++it;
}
mCumulativeBytes = 0;
mLastBitrateReportTimeUs = nowUs;
+ mLastEarlyNotifyTimeUs = nowUs;
}
}
void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index 7c8218f..a37ac0e 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -84,6 +84,7 @@
bool mPollEventPending;
int64_t mLastReceiverReportTimeUs;
int64_t mLastBitrateReportTimeUs;
+ int64_t mLastEarlyNotifyTimeUs;
int32_t mSelfID;
int32_t mTargetBitrate;
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index c611f6f..3fdf8e4 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -34,6 +34,8 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <strings.h>
+
namespace android {
static uint32_t kSourceID = 0xdeadbeef;
@@ -380,21 +382,24 @@
data[14] = (mID >> 8) & 0xff;
data[15] = mID & 0xff;
- int32_t exp, mantissa;
+    // Find the positions of the most-significant and least-significant '1' bits of the value.
+ int32_t leftEnd = 31 - __builtin_clz(targetBitrate);
+ int32_t rightEnd = ffs(targetBitrate) - 1;
- // Round off to the nearest 2^4th
- ALOGI("UE -> Op Req Rx bitrate : %d ", targetBitrate & 0xfffffff0);
- for (exp=4 ; exp < 32 ; exp++)
- if (((targetBitrate >> exp) & 0x01) != 0)
- break;
- mantissa = targetBitrate >> exp;
+    // The mantissa has only 17 bits of space per the RTCP specification.
+ if ((leftEnd - rightEnd) > 16) {
+ rightEnd = leftEnd - 16;
+ }
+ int32_t mantissa = targetBitrate >> rightEnd;
- data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
- data[17] = (mantissa & 0x07f80) >> 7;
- data[18] = (mantissa & 0x0007f) << 1;
+ data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+ data[17] = (mantissa & 0x07f80) >> 7;
+ data[18] = (mantissa & 0x0007f) << 1;
data[19] = 40; // 40 bytes overhead;
buffer->setRange(buffer->offset(), buffer->size() + (data[3] + 1) * sizeof(int32_t));
+
+ ALOGI("UE -> Op Req Rx bitrate : %d ", mantissa << rightEnd);
}
int ARTPSource::addNACK(const sp<ABuffer> &buffer) {
@@ -512,10 +517,22 @@
mIssueFIRRequests = enable;
}
-void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t /*time*/) {
+bool ARTPSource::isNeedToEarlyNotify() {
+ uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+ int32_t intervalExpectedInNow = expected - mPrevExpected;
+ int32_t intervalReceivedInNow = mNumBuffersReceived - mPrevNumBuffersReceived;
+
+ if (intervalExpectedInNow - intervalReceivedInNow > 5)
+ return true;
+ return false;
+}
+
+void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+ int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
+
sp<AMessage> notify = mNotify->dup();
notify->setInt32("rtcp-event", 1);
- notify->setInt32("payload-type", 102);
+ notify->setInt32("payload-type", payloadType);
notify->setInt32("feedback-type", 0);
// sending target bitrate up to application to share rtp quality.
notify->setInt32("bit-rate", bitrate);
@@ -526,9 +543,11 @@
notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
notify->post();
- uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
- mPrevExpected = expected;
- mPrevNumBuffersReceived = mNumBuffersReceived;
+ if (isRegular) {
+ uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
+ mPrevExpected = expected;
+ mPrevNumBuffersReceived = mNumBuffersReceived;
+ }
}
void ARTPSource::onIssueFIRByAssembler() {
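For context, a standalone sketch of the exponent/mantissa packing that the reworked addTMMBR() above (and addTMMBN() in ARTPWriter.cpp below) performs; packTmmbrBitrate is a hypothetical name, and a non-zero bitrate is assumed since __builtin_clz(0) is undefined.

#include <strings.h>  // ffs()

static void packTmmbrBitrate(int32_t bitrate, int32_t* exp, int32_t* mantissa) {
    int32_t leftEnd = 31 - __builtin_clz(bitrate);  // most-significant '1' bit
    int32_t rightEnd = ffs(bitrate) - 1;            // least-significant '1' bit
    if ((leftEnd - rightEnd) > 16) {                // mantissa field is 17 bits wide
        rightEnd = leftEnd - 16;
    }
    *exp = rightEnd;
    *mantissa = bitrate >> rightEnd;                // advertised value = mantissa << exp
}
// e.g. bitrate = 192000: leftEnd = 17, rightEnd = 9, mantissa = 375,
//      advertised bitrate = 375 << 9 = 192000 (exact, no rounding needed).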
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index ea683a0..c51fd8a 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -40,6 +40,17 @@
const sp<ASessionDescription> &sessionDesc, size_t index,
const sp<AMessage> ¬ify);
+ enum {
+ RTP_FIRST_PACKET = 100,
+ RTCP_FIRST_PACKET = 101,
+ RTP_QUALITY = 102,
+ RTP_QUALITY_EMC = 103,
+ RTCP_TSFB = 205,
+ RTCP_PSFB = 206,
+ RTP_CVO = 300,
+ RTP_AUTODOWN = 400,
+ };
+
void processRTPPacket(const sp<ABuffer> &buffer);
void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
void byeReceived();
@@ -55,7 +66,8 @@
void setSelfID(const uint32_t selfID);
void setJbTime(const uint32_t jbTimeMs);
void setPeriodicFIR(bool enable);
- void notifyPktInfo(int32_t bitrate, int64_t time);
+ bool isNeedToEarlyNotify();
+ void notifyPktInfo(int32_t bitrate, bool isRegular);
// FIR needs to be sent by missing packet or broken video image.
void onIssueFIRByAssembler();
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 76afb04..ec70952 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -20,8 +20,6 @@
#include "ARTPWriter.h"
-#include <fcntl.h>
-
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -32,6 +30,9 @@
#include <media/stagefright/MetaData.h>
#include <utils/ByteOrder.h>
+#include <fcntl.h>
+#include <strings.h>
+
#define PT 97
#define PT_STR "97"
@@ -46,10 +47,12 @@
#define H265_NALU_SPS 0x21
#define H265_NALU_PPS 0x22
-#define LINK_HEADER_SIZE 14
-#define IP_HEADER_SIZE 20
+#define IPV4_HEADER_SIZE 20
+#define IPV6_HEADER_SIZE 40
#define UDP_HEADER_SIZE 8
-#define TCPIP_HEADER_SIZE (LINK_HEADER_SIZE + IP_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIPV4_HEADER_SIZE (IPV4_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIPV6_HEADER_SIZE (IPV6_HEADER_SIZE + UDP_HEADER_SIZE)
+#define TCPIP_HEADER_SIZE TCPIPV4_HEADER_SIZE
#define RTP_HEADER_SIZE 12
#define RTP_HEADER_EXT_SIZE 8
#define RTP_FU_HEADER_SIZE 2
@@ -62,6 +65,9 @@
static const size_t kMaxPacketSize = 1280;
static char kCNAME[255] = "someone@somewhere";
+static const size_t kTrafficRecorderMaxEntries = 128;
+static const size_t kTrafficRecorderMaxTimeSpanMs = 2000;
+
static int UniformRand(int limit) {
return ((double)rand() * limit) / RAND_MAX;
}
@@ -71,7 +77,8 @@
mFd(dup(fd)),
mLooper(new ALooper),
mReflector(new AHandlerReflector<ARTPWriter>(this)),
- mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
+ mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+ kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
CHECK_GE(fd, 0);
mIsIPv6 = false;
@@ -122,7 +129,8 @@
mFd(dup(fd)),
mLooper(new ALooper),
mReflector(new AHandlerReflector<ARTPWriter>(this)),
- mTrafficRec(new TrafficRecorder<uint32_t, size_t>(128)) {
+ mTrafficRec(new TrafficRecorder<uint32_t /* Time */, Bytes>(
+ kTrafficRecorderMaxEntries, kTrafficRecorderMaxTimeSpanMs)) {
CHECK_GE(fd, 0);
mIsIPv6 = false;
@@ -135,7 +143,8 @@
mSPSBuf = NULL;
mPPSBuf = NULL;
- mSeqNo = seqNo;
+ initState();
+    mSeqNo = seqNo;  // Must use the explicit sequence number to keep RTP continuity.
#if LOG_TO_FILES
mRTPFd = open(
@@ -186,6 +195,29 @@
mFd = -1;
}
+void ARTPWriter::initState() {
+ if (mSourceID == 0)
+ mSourceID = rand();
+ mPayloadType = 0;
+ if (mSeqNo == 0)
+ mSeqNo = UniformRand(65536);
+ mRTPTimeBase = 0;
+ mNumRTPSent = 0;
+ mNumRTPOctetsSent = 0;
+ mLastRTPTime = 0;
+ mLastNTPTime = 0;
+
+ mOpponentID = 0;
+ mBitrate = 192000;
+
+ mNumSRsSent = 0;
+ mRTPCVOExtMap = -1;
+ mRTPCVODegrees = 0;
+ mRTPSockNetwork = 0;
+
+ mMode = INVALID;
+}
+
status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
mSource = source;
return OK;
@@ -203,21 +235,7 @@
}
mFlags &= ~kFlagEOS;
- if (mSourceID == 0)
- mSourceID = rand();
- if (mSeqNo == 0)
- mSeqNo = UniformRand(65536);
- mRTPTimeBase = 0;
- mNumRTPSent = 0;
- mNumRTPOctetsSent = 0;
- mLastRTPTime = 0;
- mLastNTPTime = 0;
- mOpponentID = 0;
- mBitrate = 192000;
- mNumSRsSent = 0;
- mRTPCVOExtMap = -1;
- mRTPCVODegrees = 0;
- mRTPSockNetwork = 0;
+ initState();
const char *mime;
CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
@@ -246,7 +264,6 @@
if (params->findInt64(kKeySocketNetwork, &sockNetwork))
updateSocketNetwork(sockNetwork);
- mMode = INVALID;
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
mMode = H264;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
@@ -600,7 +617,8 @@
ALOGW("packets can not be sent. ret=%d, buf=%d", (int)n, (int)buffer->size());
} else {
// Record current traffic and print the bit count for the last 1 sec (1000 ms).
- mTrafficRec->writeBytes(buffer->size());
+ mTrafficRec->writeBytes(buffer->size() +
+ (mIsIPv6 ? TCPIPV6_HEADER_SIZE : TCPIPV4_HEADER_SIZE));
mTrafficRec->printAccuBitsForLastPeriod(1000, 1000);
}
@@ -729,21 +747,24 @@
data[14] = (mOpponentID >> 8) & 0xff;
data[15] = mOpponentID & 0xff;
- int32_t exp, mantissa;
+    // Find the positions of the most-significant and least-significant '1' bits of the value.
+ int32_t leftEnd = 31 - __builtin_clz(mBitrate);
+ int32_t rightEnd = ffs(mBitrate) - 1;
- // Round off to the nearest 2^4th
- ALOGI("UE -> Op Noti Tx bitrate : %d ", mBitrate & 0xfffffff0);
- for (exp=4 ; exp < 32 ; exp++)
- if (((mBitrate >> exp) & 0x01) != 0)
- break;
- mantissa = mBitrate >> exp;
+    // The mantissa has only 17 bits of space per the RTCP specification.
+ if ((leftEnd - rightEnd) > 16) {
+ rightEnd = leftEnd - 16;
+ }
+ int32_t mantissa = mBitrate >> rightEnd;
- data[16] = ((exp << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
- data[17] = (mantissa & 0x07f80) >> 7;
- data[18] = (mantissa & 0x0007f) << 1;
+ data[16] = ((rightEnd << 2) & 0xfc) | ((mantissa & 0x18000) >> 15);
+ data[17] = (mantissa & 0x07f80) >> 7;
+ data[18] = (mantissa & 0x0007f) << 1;
data[19] = 40; // 40 bytes overhead;
buffer->setRange(buffer->offset(), buffer->size() + 20);
+
+ ALOGI("UE -> Op Noti Tx bitrate : %d ", mantissa << rightEnd);
}
// static
@@ -1362,6 +1383,10 @@
return mSeqNo;
}
+uint64_t ARTPWriter::getAccumulativeBytes() {
+ return mTrafficRec->readBytesForTotal();
+}
+
static size_t getFrameSize(bool isWide, unsigned FT) {
static const size_t kFrameSizeNB[8] = {
95, 103, 118, 134, 148, 159, 204, 244
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 6f25a66..28d6ec5 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -53,6 +53,7 @@
void updateSocketDscp(int32_t dscp);
void updateSocketNetwork(int64_t socketNetwork);
uint32_t getSequenceNum();
+ virtual uint64_t getAccumulativeBytes() override;
virtual void onMessageReceived(const sp<AMessage> &msg);
virtual void setTMMBNInfo(uint32_t opponentID, uint32_t bitrate);
@@ -118,7 +119,8 @@
uint32_t mOpponentID;
uint32_t mBitrate;
- sp<TrafficRecorder<uint32_t, size_t> > mTrafficRec;
+ typedef uint64_t Bytes;
+ sp<TrafficRecorder<uint32_t /* Time */, Bytes> > mTrafficRec;
int32_t mNumSRsSent;
int32_t mRTPCVOExtMap;
@@ -135,6 +137,7 @@
static uint64_t GetNowNTP();
+ void initState();
void onRead(const sp<AMessage> &msg);
void onSendSR(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/TrafficRecorder.h b/media/libstagefright/rtsp/TrafficRecorder.h
index f8e7c03..8ba8f90 100644
--- a/media/libstagefright/rtsp/TrafficRecorder.h
+++ b/media/libstagefright/rtsp/TrafficRecorder.h
@@ -27,44 +27,49 @@
template <class Time, class Bytes>
class TrafficRecorder : public RefBase {
private:
+ constexpr static size_t kMinNumEntries = 4;
+ constexpr static size_t kMaxNumEntries = 1024;
+
size_t mSize;
size_t mSizeMask;
Time *mTimeArray = NULL;
Bytes *mBytesArray = NULL;
- size_t mHeadIdx = 0;
- size_t mTailIdx = 0;
+ size_t mHeadIdx;
+ size_t mTailIdx;
- Time mClock = 0;
- Time mLastTimeOfPrint = 0;
- Bytes mAccuBytesOfPrint = 0;
+ int mLastReadIdx;
+
+ const Time mRecordLimit;
+ Time mClock;
+ Time mLastTimeOfPrint;
+ Bytes mAccuBytes;
+
public:
- TrafficRecorder();
- TrafficRecorder(size_t size);
+ TrafficRecorder(size_t size, Time accuTimeLimit);
virtual ~TrafficRecorder();
void init();
-
void updateClock(Time now);
-
+ Bytes readBytesForTotal();
Bytes readBytesForLastPeriod(Time period);
void writeBytes(Bytes bytes);
-
void printAccuBitsForLastPeriod(Time period, Time unit);
};
template <class Time, class Bytes>
-TrafficRecorder<Time, Bytes>::TrafficRecorder() {
- TrafficRecorder(128);
-}
-
-template <class Time, class Bytes>
-TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size) {
- size_t exp;
- for (exp = 0; exp < 32; exp++) {
- if (size <= (1ul << exp)) {
- break;
- }
+TrafficRecorder<Time, Bytes>::TrafficRecorder(size_t size, Time recordLimit)
+ : mRecordLimit(recordLimit) {
+ if (size > kMaxNumEntries) {
+ LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMaxNumEntries;
+ size = kMaxNumEntries;
+ } else if (size < kMinNumEntries) {
+        LOG(VERBOSE) << "Limiting TrafficRecorder size to " << kMinNumEntries;
+ size = kMinNumEntries;
}
+
+ size_t exp = ((sizeof(size_t) == 8) ?
+ 64 - __builtin_clzl(size - 1) :
+ 32 - __builtin_clz(size - 1));
mSize = (1ul << exp); // size = 2^exp
mSizeMask = mSize - 1;
@@ -84,9 +89,15 @@
template <class Time, class Bytes>
void TrafficRecorder<Time, Bytes>::init() {
mHeadIdx = 0;
- mTailIdx = 0;
- mTimeArray[0] = 0;
- mBytesArray[0] = 0;
+ mTailIdx = mSizeMask;
+    for (size_t i = 0; i < mSize; i++) {
+ mTimeArray[i] = 0;
+ mBytesArray[i] = 0;
+ }
+ mClock = 0;
+ mLastReadIdx = 0;
+ mLastTimeOfPrint = 0;
+ mAccuBytes = 0;
}
template <class Time, class Bytes>
@@ -95,54 +106,71 @@
}
template <class Time, class Bytes>
-Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
- Bytes bytes = 0;
+Bytes TrafficRecorder<Time, Bytes>::readBytesForTotal() {
+ return mAccuBytes;
+}
- size_t i = mTailIdx;
- while (i != mHeadIdx) {
- LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i] << " \t EndOfPeriod " << mClock - period;
+template <class Time, class Bytes>
+Bytes TrafficRecorder<Time, Bytes>::readBytesForLastPeriod(Time period) {
+ // Not enough data
+ if (period > mClock)
+ return 0;
+
+ Bytes bytes = 0;
+ int i = mHeadIdx;
+ while (i != mTailIdx) {
+ LOG(VERBOSE) << "READ " << i << " time " << mTimeArray[i]
+ << " \t EndOfPeriod " << mClock - period
+ << "\t\t Bytes:" << mBytesArray[i] << "\t\t Accu: " << bytes;
if (mTimeArray[i] < mClock - period) {
break;
}
bytes += mBytesArray[i];
- i = (i + mSize - 1) & mSizeMask;
+ i = (i - 1) & mSizeMask;
}
- mHeadIdx = i;
+ mLastReadIdx = (i + 1) & mSizeMask;
+
return bytes;
}
template <class Time, class Bytes>
void TrafficRecorder<Time, Bytes>::writeBytes(Bytes bytes) {
- size_t writeIdx;
- if (mClock == mTimeArray[mTailIdx]) {
- writeIdx = mTailIdx;
+ int writeIdx;
+ if (mClock == mTimeArray[mHeadIdx]) {
+ writeIdx = mHeadIdx;
mBytesArray[writeIdx] += bytes;
} else {
- writeIdx = (mTailIdx + 1) % mSize;
+ writeIdx = (mHeadIdx + 1) & mSizeMask;
mTimeArray[writeIdx] = mClock;
mBytesArray[writeIdx] = bytes;
}
LOG(VERBOSE) << "WRITE " << writeIdx << " time " << mClock;
- if (writeIdx == mHeadIdx) {
- LOG(WARNING) << "Traffic recorder size exceeded at " << mHeadIdx;
- mHeadIdx = (mHeadIdx + 1) & mSizeMask;
+ if (writeIdx == mTailIdx) {
+ mTailIdx = (mTailIdx + 1) & mSizeMask;
}
- mTailIdx = writeIdx;
- mAccuBytesOfPrint += bytes;
+ mHeadIdx = writeIdx;
+ mAccuBytes += bytes;
}
template <class Time, class Bytes>
void TrafficRecorder<Time, Bytes>::printAccuBitsForLastPeriod(Time period, Time unit) {
- Time duration = mClock - mLastTimeOfPrint;
- float numOfUnit = (float)duration / unit;
- if (duration > period) {
- ALOGD("Actual Tx period %.0f ms \t %.0f Bits/Unit",
- numOfUnit * 1000.f, mAccuBytesOfPrint * 8.f / numOfUnit);
- mLastTimeOfPrint = mClock;
- mAccuBytesOfPrint = 0;
- init();
+ Time timeSinceLastPrint = mClock - mLastTimeOfPrint;
+ if (timeSinceLastPrint < period)
+ return;
+
+ Bytes sum = readBytesForLastPeriod(period);
+ Time readPeriod = mClock - mTimeArray[mLastReadIdx];
+
+ float numOfUnit = (float)(readPeriod) / (unit + FLT_MIN);
+ ALOGD("Actual Tx period %.3f unit \t %.0f bytes (%.0f Kbits)/Unit",
+ numOfUnit, sum / numOfUnit, sum * 8.f / numOfUnit / 1000.f);
+ mLastTimeOfPrint = mClock;
+
+ if (mClock - mTimeArray[mTailIdx] < mRecordLimit) {
+        // The buffer is not large enough to record bytes for the full mRecordLimit period.
+ ALOGW("Traffic recorder size is not enough. mRecordLimit %d", mRecordLimit);
}
}
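For reference, a minimal usage sketch of the reworked recorder built from the API above; recordAndReport is hypothetical, the 128/2000 constants mirror kTrafficRecorderMaxEntries and kTrafficRecorderMaxTimeSpanMs from ARTPWriter.cpp, and the android namespace is assumed to be open as it is for the callers in ARTPWriter.

static void recordAndReport(uint32_t nowMs, uint64_t packetBytes) {
    sp<TrafficRecorder<uint32_t /* Time, ms */, uint64_t /* Bytes */>> rec =
            new TrafficRecorder<uint32_t, uint64_t>(128 /* entries */, 2000 /* ms */);
    rec->updateClock(nowMs);        // advance the recorder's clock first
    rec->writeBytes(packetBytes);   // then log bytes at that clock value
    uint64_t lastSecond = rec->readBytesForLastPeriod(1000);  // bytes in last 1000 ms
    uint64_t total = rec->readBytesForTotal();                // all bytes since init()
    ALOGD("last second: %llu bytes, total: %llu bytes",
          (unsigned long long)lastSecond, (unsigned long long)total);
}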
diff --git a/media/libstagefright/writer_fuzzers/README.md b/media/libstagefright/writer_fuzzers/README.md
index 0d21031..6f95ecc 100644
--- a/media/libstagefright/writer_fuzzers/README.md
+++ b/media/libstagefright/writer_fuzzers/README.md
@@ -29,7 +29,7 @@
| Parameter| Valid Values| Configured Value|
|------------- |-------------| ----- |
-| `mime` | 0. `audio/3gpp` 1. `audio/amr-wb` 2. `audio/vorbis` 3. `audio/opus` 4. `audio/mp4a-latm` 5. `video/avc` 6. `video/hevc` 7. `video/mp4v-es` 8. `video/3gpp` 9. `video/x-vnd.on2.vp8` 10. `video/x-vnd.on2.vp9` | All the bits of 2nd byte of data for first track and 11th byte of data for second track (if present) modulus 10 |
+| `mime` | 0. `audio/3gpp` 1. `audio/amr-wb` 2. `audio/vorbis` 3. `audio/opus` 4. `audio/mp4a-latm` 5. `audio/mpeg` 6. `audio/mpeg-L1` 7. `audio/mpeg-L2` 8. `audio/midi` 9. `audio/qcelp` 10. `audio/g711-alaw` 11. `audio/g711-mlaw` 12. `audio/flac` 13. `audio/aac-adts` 14. `audio/gsm` 15. `audio/ac3` 16. `audio/eac3` 17. `audio/eac3-joc` 18. `audio/ac4` 19. `audio/scrambled` 20. `audio/alac` 21. `audio/x-ms-wma` 22. `audio/x-adpcm-ms` 23. `audio/x-adpcm-dvi-ima` 24. `video/avc` 25. `video/hevc` 26. `video/mp4v-es` 27. `video/3gpp` 28. `video/x-vnd.on2.vp8` 29. `video/x-vnd.on2.vp9` 30. `video/av01` 31. `video/mpeg2` 32. `video/dolby-vision` 33. `video/scrambled` 34. `video/divx` 35. `video/divx3` 36. `video/xvid` 37. `video/x-motion-jpeg` 38. `text/3gpp-tt` 39. `application/x-subrip` 40. `text/vtt` 41. `text/cea-608` 42. `text/cea-708` 43. `application/x-id3v4` | All the bits of 2nd byte of data for first track and 11th byte of data for second track and 20th byte of data for third track(if present) modulus 44 |
| `channel-count` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is audio and 12th to 15th bytes of data if second track is audio |
| `sample-rate` | In the range `1 to INT32_MAX` | All the bits of 7th byte to 10th bytes of data if first track is audio and 16th to 19th bytes of data if second track is audio |
| `height` | In the range `0 to INT32_MAX` | All the bits of 3rd byte to 6th bytes of data if first track is video and 12th to 15th bytes of data if second track is video |
diff --git a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
index 844db39..ee7af70 100644
--- a/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
+++ b/media/libstagefright/writer_fuzzers/WriterFuzzerBase.cpp
@@ -53,7 +53,7 @@
return mNumCsds[trackIndex];
}
-vector<FrameData> WriterFuzzerBase::BufferSource::getFrameList(int32_t trackIndex) {
+vector<FrameData> &WriterFuzzerBase::BufferSource::getFrameList(int32_t trackIndex) {
return mFrameList[trackIndex];
}
@@ -92,9 +92,8 @@
} else {
break;
}
- mFrameList[trackIndex].insert(
- mFrameList[trackIndex].begin(),
- FrameData{static_cast<int32_t>(bufferSize), flags, pts, framePtr});
+ mFrameList[trackIndex].insert(mFrameList[trackIndex].begin(),
+ FrameData{bufferSize, flags, pts, framePtr});
bytesRemaining -= (frameSize + kMarkerSize + kMarkerSuffixSize);
--mReadIndex;
}
@@ -105,31 +104,36 @@
* Scenario where input data does not contain the custom frame markers.
* Hence feed the entire data as single frame.
*/
- mFrameList[0].emplace_back(
- FrameData{static_cast<int32_t>(mSize - readIndexStart), 0, 0, mData + readIndexStart});
+ mFrameList[0].emplace_back(FrameData{mSize - readIndexStart, 0, 0, mData + readIndexStart});
}
}
bool WriterFuzzerBase::BufferSource::getTrackInfo(int32_t trackIndex) {
- if (mSize <= mReadIndex + 2 * sizeof(int) + sizeof(uint8_t)) {
+ if (mSize <= mReadIndex + sizeof(uint8_t)) {
return false;
}
size_t mimeTypeIdx = mData[mReadIndex] % kSupportedMimeTypes;
char *mime = (char *)supportedMimeTypes[mimeTypeIdx].c_str();
mParams[trackIndex].mime = mime;
- ++mReadIndex;
+ mReadIndex += sizeof(uint8_t);
- if (!strncmp(mime, "audio/", 6)) {
- copy(mData + mReadIndex, mData + mReadIndex + sizeof(int),
- reinterpret_cast<char *>(&mParams[trackIndex].channelCount));
- copy(mData + mReadIndex + sizeof(int), mData + mReadIndex + 2 * sizeof(int),
- reinterpret_cast<char *>(&mParams[trackIndex].sampleRate));
+ if (mSize > mReadIndex + 2 * sizeof(int32_t)) {
+ if (!strncmp(mime, "audio/", 6)) {
+ copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+ reinterpret_cast<char *>(&mParams[trackIndex].channelCount));
+ copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+ reinterpret_cast<char *>(&mParams[trackIndex].sampleRate));
+ } else if (!strncmp(mime, "video/", 6)) {
+ copy(mData + mReadIndex, mData + mReadIndex + sizeof(int32_t),
+ reinterpret_cast<char *>(&mParams[trackIndex].height));
+ copy(mData + mReadIndex + sizeof(int32_t), mData + mReadIndex + 2 * sizeof(int32_t),
+ reinterpret_cast<char *>(&mParams[trackIndex].width));
+ }
+ mReadIndex += 2 * sizeof(int32_t);
} else {
- copy(mData + mReadIndex, mData + mReadIndex + sizeof(int),
- reinterpret_cast<char *>(&mParams[trackIndex].height));
- copy(mData + mReadIndex + sizeof(int), mData + mReadIndex + 2 * sizeof(int),
- reinterpret_cast<char *>(&mParams[trackIndex].width));
+ if (strncmp(mime, "text/", 5) && strncmp(mime, "application/", 12)) {
+ return false;
+ }
}
- mReadIndex += 2 * sizeof(int);
return true;
}
@@ -173,7 +177,7 @@
}
format->setInt32("channel-count", params.channelCount);
format->setInt32("sample-rate", params.sampleRate);
- } else {
+ } else if (!strncmp(params.mime, "video/", 6)) {
format->setInt32("width", params.width);
format->setInt32("height", params.height);
}
@@ -193,11 +197,10 @@
mWriter->start(mFileMeta.get());
}
-void WriterFuzzerBase::sendBuffersToWriter(sp<MediaAdapter> &currentTrack, int32_t trackIndex) {
- int32_t numCsds = mBufferSource->getNumCsds(trackIndex);
+void WriterFuzzerBase::sendBuffersToWriter(sp<MediaAdapter> &currentTrack, int32_t trackIndex,
+ int32_t startFrameIndex, int32_t endFrameIndex) {
vector<FrameData> bufferInfo = mBufferSource->getFrameList(trackIndex);
- int32_t range = bufferInfo.size();
- for (int idx = numCsds; idx < range; ++idx) {
+ for (int idx = startFrameIndex; idx < endFrameIndex; ++idx) {
sp<ABuffer> buffer = new ABuffer((void *)bufferInfo[idx].buf, bufferInfo[idx].size);
MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
@@ -209,7 +212,7 @@
// Just set the kKeyDecodingTime as the presentation time for now.
sampleMetaData.setInt64(kKeyDecodingTime, bufferInfo[idx].timeUs);
- if (bufferInfo[idx].flags == 1) {
+ if (bufferInfo[idx].flags == SampleFlag::SYNC_FLAG) {
sampleMetaData.setInt32(kKeyIsSyncFrame, true);
}
@@ -218,6 +221,28 @@
}
}
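+// Feed frames to the writer tracks in round-robin order, sending up to
+// numBuffersInterleave frames per track on each pass until every track's
+// frame list is exhausted.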
+void WriterFuzzerBase::sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave) {
+ int32_t currentFrameIndex[numTracks], remainingNumFrames[numTracks], numTrackFramesDone;
+ for (int32_t idx = 0; idx < numTracks; ++idx) {
+ currentFrameIndex[idx] = mBufferSource->getNumCsds(idx);
+ remainingNumFrames[idx] = mBufferSource->getFrameList(idx).size() - currentFrameIndex[idx];
+ }
+ do {
+ numTrackFramesDone = numTracks;
+ for (int32_t idx = 0; idx < numTracks; ++idx) {
+ if (remainingNumFrames[idx] > 0) {
+ int32_t numFramesInterleave =
+ min(remainingNumFrames[idx], static_cast<int32_t>(numBuffersInterleave));
+ sendBuffersToWriter(mCurrentTrack[idx], idx, currentFrameIndex[idx],
+ currentFrameIndex[idx] + numFramesInterleave);
+ currentFrameIndex[idx] += numFramesInterleave;
+ remainingNumFrames[idx] -= numFramesInterleave;
+ --numTrackFramesDone;
+ }
+ }
+ } while (numTrackFramesDone < numTracks);
+}
+
void WriterFuzzerBase::initFileWriterAndProcessData(const uint8_t *data, size_t size) {
if (!createOutputFile()) {
return;
@@ -225,6 +250,14 @@
if (!createWriter()) {
return;
}
+
+ if (size < 1) {
+ return;
+ }
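+ // The first input byte selects how many frames to interleave per track
+ // (0 maps to 1); the remaining bytes feed the BufferSource.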
+ uint8_t numBuffersInterleave = (data[0] == 0 ? 1 : data[0]);
+ ++data;
+ --size;
+
mBufferSource = new BufferSource(data, size);
if (!mBufferSource) {
return;
@@ -246,9 +279,7 @@
addWriterSource(idx);
}
start();
- for (int32_t idx = 0; idx < mNumTracks; ++idx) {
- sendBuffersToWriter(mCurrentTrack[idx], idx);
- }
+ sendBuffersInterleave(mNumTracks, numBuffersInterleave);
for (int32_t idx = 0; idx < mNumTracks; ++idx) {
if (mCurrentTrack[idx]) {
mCurrentTrack[idx]->stop();
diff --git a/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
index da06463..4315322 100644
--- a/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
+++ b/media/libstagefright/writer_fuzzers/include/WriterFuzzerBase.h
@@ -34,7 +34,7 @@
using namespace std;
constexpr uint32_t kMimeSize = 128;
-constexpr uint8_t kMaxTrackCount = 2;
+constexpr uint8_t kMaxTrackCount = 3;
constexpr uint32_t kMaxCSDStrlen = 16;
constexpr uint32_t kCodecConfigFlag = 32;
@@ -49,25 +49,65 @@
};
struct FrameData {
- int32_t size;
+ size_t size;
uint8_t flags;
int64_t timeUs;
const uint8_t* buf;
};
-static string supportedMimeTypes[] = {
- "audio/3gpp", "audio/amr-wb", "audio/vorbis", "audio/opus",
- "audio/mp4a-latm", "video/avc", "video/hevc", "video/mp4v-es",
- "video/3gpp", "video/x-vnd.on2.vp8", "video/x-vnd.on2.vp9",
-};
+static string supportedMimeTypes[] = {"audio/3gpp",
+ "audio/amr-wb",
+ "audio/vorbis",
+ "audio/opus",
+ "audio/mp4a-latm",
+ "audio/mpeg",
+ "audio/mpeg-L1",
+ "audio/mpeg-L2",
+ "audio/midi",
+ "audio/qcelp",
+ "audio/g711-alaw",
+ "audio/g711-mlaw",
+ "audio/flac",
+ "audio/aac-adts",
+ "audio/gsm",
+ "audio/ac3",
+ "audio/eac3",
+ "audio/eac3-joc",
+ "audio/ac4",
+ "audio/scrambled",
+ "audio/alac",
+ "audio/x-ms-wma",
+ "audio/x-adpcm-ms",
+ "audio/x-adpcm-dvi-ima",
+ "video/avc",
+ "video/hevc",
+ "video/mp4v-es",
+ "video/3gpp",
+ "video/x-vnd.on2.vp8",
+ "video/x-vnd.on2.vp9",
+ "video/av01",
+ "video/mpeg2",
+ "video/dolby-vision",
+ "video/scrambled",
+ "video/divx",
+ "video/divx3",
+ "video/xvid",
+ "video/x-motion-jpeg",
+ "text/3gpp-tt",
+ "application/x-subrip",
+ "text/vtt",
+ "text/cea-608",
+ "text/cea-708",
+ "application/x-id3v4"};
-enum {
+enum SampleFlag {
DEFAULT_FLAG = 0,
SYNC_FLAG = 1,
ENCRYPTED_FLAG = 2,
};
-static uint8_t flagTypes[] = {DEFAULT_FLAG, SYNC_FLAG, ENCRYPTED_FLAG};
+static uint8_t flagTypes[] = {SampleFlag::DEFAULT_FLAG, SampleFlag::SYNC_FLAG,
+ SampleFlag::ENCRYPTED_FLAG};
class WriterFuzzerBase {
public:
@@ -105,7 +145,10 @@
void start();
- void sendBuffersToWriter(sp<MediaAdapter>& currentTrack, int32_t trackIndex);
+ void sendBuffersToWriter(sp<MediaAdapter>& currentTrack, int32_t trackIndex,
+ int32_t startFrameIndex, int32_t endFrameIndex);
+
+ void sendBuffersInterleave(int32_t numTracks, uint8_t numBuffersInterleave);
void initFileWriterAndProcessData(const uint8_t* data, size_t size);
@@ -126,7 +169,7 @@
void getFrameInfo();
ConfigFormat getConfigFormat(int32_t trackIndex);
int32_t getNumCsds(int32_t trackIndex);
- vector<FrameData> getFrameList(int32_t trackIndex);
+ vector<FrameData>& getFrameList(int32_t trackIndex);
private:
bool isMarker() { return (memcmp(&mData[mReadIndex], kMarker, kMarkerSize) == 0); }
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
new file mode 100644
index 0000000..df4ef95
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -0,0 +1,55 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+cc_fuzz {
+ name: "camera_service_fuzzer",
+ srcs: [
+ "camera_service_fuzzer.cpp",
+ ],
+ header_libs: [
+ "libmedia_headers",
+ ],
+ shared_libs: [
+ "libbinder",
+ "libbase",
+ "libutils",
+ "libcutils",
+ "libcameraservice",
+ "libcamera_client",
+ "libui",
+ "libgui",
+ "android.hardware.camera.common@1.0",
+ "android.hardware.camera.provider@2.4",
+ "android.hardware.camera.provider@2.5",
+ "android.hardware.camera.provider@2.6",
+ "android.hardware.camera.device@1.0",
+ "android.hardware.camera.device@3.2",
+ "android.hardware.camera.device@3.3",
+ "android.hardware.camera.device@3.4",
+ "android.hardware.camera.device@3.5",
+ "android.hardware.camera.device@3.6",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ },
+}
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/README.md b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
new file mode 100644
index 0000000..c703845
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/README.md
@@ -0,0 +1,59 @@
+# Fuzzer for libcameraservice
+
+## Plugin Design Considerations
+The fuzzer plugin is designed based on an understanding of the
+library and tries to achieve the following:
+
+##### Maximize code coverage
+The configuration parameters are not hardcoded, but instead selected based on
+incoming data. This ensures more code paths are reached by the fuzzer.
+
+libcameraservice supports the following parameters:
+1. Camera Type (parameter name: `cameraType`)
+2. Camera API Version (parameter name: `cameraAPIVersion`)
+3. Event ID (parameter name: `eventId`)
+4. Camera Sound Kind (parameter name: `soundKind`)
+5. Shell Command (parameter name: `shellCommand`)
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+| `cameraType` | 0. `CAMERA_TYPE_BACKWARD_COMPATIBLE` 1. `CAMERA_TYPE_ALL` | Value obtained from FuzzedDataProvider |
+| `cameraAPIVersion` | 0. `API_VERSION_1` 1. `API_VERSION_2` | Value obtained from FuzzedDataProvider |
+| `eventId` | 0. `EVENT_USER_SWITCHED` 1. `EVENT_NONE` | Value obtained from FuzzedDataProvider |
+| `soundKind` | 0. `SOUND_SHUTTER` 1. `SOUND_RECORDING_START` 2. `SOUND_RECORDING_STOP`| Value obtained from FuzzedDataProvider |
+| `shellCommand` | 0. `set-uid-state` 1. `reset-uid-state` 2. `get-uid-state` 3. `set-rotate-and-crop` 4. `get-rotate-and-crop` 5. `help`| Value obtained from FuzzedDataProvider |
+
+This also ensures that the plugin is always deterministic for any given input.
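+
+The selection pattern is the same for every parameter above: one fuzzed bit
+decides whether a valid value from the table or an arbitrary fuzzed value is
+passed to the service. A minimal sketch (illustrative only; `pickCameraType`
+is a hypothetical helper, while `kCamType` and `FuzzedDataProvider` come from
+the fuzzer source):
+
+```
+#include "fuzzer/FuzzedDataProvider.h"
+
+// Sketch only: either an arbitrary (possibly invalid) value or one of the
+// two valid cameraType values is chosen, driven entirely by the input data.
+static int32_t pickCameraType(FuzzedDataProvider &fdp) {
+    if (fdp.ConsumeBool()) {
+        return fdp.ConsumeIntegral<int32_t>();  // arbitrary value
+    }
+    return kCamType[fdp.ConsumeBool()];         // valid table entry
+}
+```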
+
+##### Maximize utilization of input data
+The plugin tolerates any kind of input (empty, huge,
+malformed, etc.) and doesn't `exit()` on any input, thereby increasing the
+chance of identifying vulnerabilities.
+
+## Build
+
+This describes the steps to build the camera_service_fuzzer binary.
+
+### Android
+
+#### Steps to build
+Build the fuzzer
+```
+ $ mm -j$(nproc) camera_service_fuzzer
+```
+
+#### Steps to run
+Create a directory CORPUS_DIR on the device
+```
+ $ adb shell mkdir CORPUS_DIR
+```
+
+To run on the device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/camera_service_fuzzer/camera_service_fuzzer CORPUS_DIR
+```
+
+## References:
+ * http://llvm.org/docs/LibFuzzer.html
+ * https://github.com/google/oss-fuzz
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
new file mode 100644
index 0000000..54550a5
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -0,0 +1,433 @@
+/******************************************************************************
+ *
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *****************************************************************************
+ * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
+ */
+
+#include <CameraService.h>
+#include <android/hardware/ICameraServiceListener.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <private/android_filesystem_config.h>
+#include "fuzzer/FuzzedDataProvider.h"
+
+using namespace android;
+using namespace hardware;
+using namespace std;
+
+const int32_t kPreviewThreshold = 8;
+const nsecs_t kPreviewTimeout = 5000000000; // .5 [s.]
+const nsecs_t kEventTimeout = 10000000000; // 1 [s.]
+const size_t kMaxNumLines = USHRT_MAX;
+const size_t kMinArgs = 1;
+const size_t kMaxArgs = 5;
+const int32_t kCamType[] = {hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
+ hardware::ICameraService::CAMERA_TYPE_ALL};
+const int kCameraApiVersion[] = {android::CameraService::API_VERSION_1,
+ android::CameraService::API_VERSION_2};
+const int kLayerMetadata[] = {
+ 0x00100000 /*GRALLOC_USAGE_RENDERSCRIPT*/, 0x00000003 /*GRALLOC_USAGE_SW_READ_OFTEN*/,
+ 0x00000100 /*GRALLOC_USAGE_HW_TEXTURE*/, 0x00000800 /*GRALLOC_USAGE_HW_COMPOSER*/,
+ 0x00000200 /*GRALLOC_USAGE_HW_RENDER*/, 0x00010000 /*GRALLOC_USAGE_HW_VIDEO_ENCODER*/};
+const int kCameraMsg[] = {0x001 /*CAMERA_MSG_ERROR*/,
+ 0x002 /*CAMERA_MSG_SHUTTER*/,
+ 0x004 /*CAMERA_MSG_FOCUS*/,
+ 0x008 /*CAMERA_MSG_ZOOM*/,
+ 0x010 /*CAMERA_MSG_PREVIEW_FRAME*/,
+ 0x020 /*CAMERA_MSG_VIDEO_FRAME */,
+ 0x040 /*CAMERA_MSG_POSTVIEW_FRAME*/,
+ 0x080 /*CAMERA_MSG_RAW_IMAGE */,
+ 0x100 /*CAMERA_MSG_COMPRESSED_IMAGE*/,
+ 0x200 /*CAMERA_MSG_RAW_IMAGE_NOTIFY*/,
+ 0x400 /*CAMERA_MSG_PREVIEW_METADATA*/,
+ 0x800 /*CAMERA_MSG_FOCUS_MOVE*/};
+const int32_t kEventId[] = {ICameraService::EVENT_USER_SWITCHED, ICameraService::EVENT_NONE};
+const android::CameraService::sound_kind kSoundKind[] = {
+ android::CameraService::SOUND_SHUTTER, android::CameraService::SOUND_RECORDING_START,
+ android::CameraService::SOUND_RECORDING_STOP};
+const String16 kShellCmd[] = {String16("set-uid-state"), String16("reset-uid-state"),
+ String16("get-uid-state"), String16("set-rotate-and-crop"),
+ String16("get-rotate-and-crop"), String16("help")};
+const size_t kNumLayerMetaData = size(kLayerMetadata);
+const size_t kNumCameraMsg = size(kCameraMsg);
+const size_t kNumSoundKind = size(kSoundKind);
+const size_t kNumShellCmd = size(kShellCmd);
+
+class CameraFuzzer : public ::android::hardware::BnCameraClient {
+ public:
+ CameraFuzzer() = default;
+ ~CameraFuzzer() { deInit(); }
+ bool init();
+ void process(const uint8_t *data, size_t size);
+ void deInit();
+
+ private:
+ FuzzedDataProvider *mFuzzedDataProvider = nullptr;
+ sp<CameraService> mCameraService = nullptr;
+ sp<SurfaceComposerClient> mComposerClient = nullptr;
+ int32_t mNumCameras = 0;
+ size_t mPreviewBufferCount = 0;
+ bool mAutoFocusMessage = false;
+ bool mSnapshotNotification = false;
+ mutable Mutex mPreviewLock;
+ mutable Condition mPreviewCondition;
+ mutable Mutex mAutoFocusLock;
+ mutable Condition mAutoFocusCondition;
+ mutable Mutex mSnapshotLock;
+ mutable Condition mSnapshotCondition;
+
+ void getNumCameras();
+ void getCameraInformation(int32_t cameraId);
+ void invokeCameraAPIs();
+ void invokeCameraSound();
+ void invokeDump();
+ void invokeShellCommand();
+ void invokeNotifyCalls();
+
+ // CameraClient interface
+ void notifyCallback(int32_t msgType, int32_t, int32_t) override;
+ void dataCallback(int32_t msgType, const sp<IMemory> &, camera_frame_metadata_t *) override;
+ void dataCallbackTimestamp(nsecs_t, int32_t, const sp<IMemory> &) override{};
+ void recordingFrameHandleCallbackTimestamp(nsecs_t, native_handle_t *) override{};
+ void recordingFrameHandleCallbackTimestampBatch(
+ const std::vector<nsecs_t> &, const std::vector<native_handle_t *> &) override{};
+ status_t waitForPreviewStart();
+ status_t waitForEvent(Mutex &mutex, Condition &condition, bool &flag);
+};
+
+void CameraFuzzer::notifyCallback(int32_t msgType, int32_t, int32_t) {
+ if (CAMERA_MSG_FOCUS == msgType) {
+ Mutex::Autolock l(mAutoFocusLock);
+ mAutoFocusMessage = true;
+ mAutoFocusCondition.broadcast();
+ }
+};
+
+void CameraFuzzer::dataCallback(int32_t msgType, const sp<IMemory> & /*data*/,
+ camera_frame_metadata_t *) {
+ switch (msgType) {
+ case CAMERA_MSG_PREVIEW_FRAME: {
+ Mutex::Autolock l(mPreviewLock);
+ ++mPreviewBufferCount;
+ mPreviewCondition.broadcast();
+ break;
+ }
+ case CAMERA_MSG_COMPRESSED_IMAGE: {
+ Mutex::Autolock l(mSnapshotLock);
+ mSnapshotNotification = true;
+ mSnapshotCondition.broadcast();
+ break;
+ }
+ default:
+ break;
+ }
+};
+
+status_t CameraFuzzer::waitForPreviewStart() {
+ status_t rc = NO_ERROR;
+ Mutex::Autolock l(mPreviewLock);
+ mPreviewBufferCount = 0;
+
+ while (mPreviewBufferCount < kPreviewThreshold) {
+ rc = mPreviewCondition.waitRelative(mPreviewLock, kPreviewTimeout);
+ if (NO_ERROR != rc) {
+ break;
+ }
+ }
+
+ return rc;
+}
+
+status_t CameraFuzzer::waitForEvent(Mutex &mutex, Condition &condition, bool &flag) {
+ status_t rc = NO_ERROR;
+ Mutex::Autolock l(mutex);
+ flag = false;
+
+ while (!flag) {
+ rc = condition.waitRelative(mutex, kEventTimeout);
+ if (NO_ERROR != rc) {
+ break;
+ }
+ }
+
+ return rc;
+}
+
+bool CameraFuzzer::init() {
+ setuid(AID_MEDIA);
+ mCameraService = new CameraService();
+ if (mCameraService) {
+ return true;
+ }
+ return false;
+}
+
+void CameraFuzzer::deInit() {
+ if (mCameraService) {
+ mCameraService = nullptr;
+ }
+ if (mComposerClient) {
+ mComposerClient->dispose();
+ }
+}
+
+void CameraFuzzer::getNumCameras() {
+ bool shouldPassInvalidCamType = mFuzzedDataProvider->ConsumeBool();
+ int32_t camType;
+ if (shouldPassInvalidCamType) {
+ camType = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ } else {
+ camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
+ }
+ mCameraService->getNumberOfCameras(camType, &mNumCameras);
+}
+
+void CameraFuzzer::getCameraInformation(int32_t cameraId) {
+ String16 cameraIdStr = String16(String8::format("%d", cameraId));
+ bool isSupported = false;
+ mCameraService->supportsCameraApi(
+ cameraIdStr, kCameraApiVersion[mFuzzedDataProvider->ConsumeBool()], &isSupported);
+ mCameraService->isHiddenPhysicalCamera(cameraIdStr, &isSupported);
+
+ String16 parameters;
+ mCameraService->getLegacyParameters(cameraId, &parameters);
+
+ std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination> concurrentCameraIds;
+ mCameraService->getConcurrentCameraIds(&concurrentCameraIds);
+
+ hardware::camera2::params::VendorTagDescriptorCache cache;
+ mCameraService->getCameraVendorTagCache(&cache);
+
+ CameraInfo cameraInfo;
+ mCameraService->getCameraInfo(cameraId, &cameraInfo);
+
+ CameraMetadata metadata;
+ mCameraService->getCameraCharacteristics(cameraIdStr, &metadata);
+}
+
+void CameraFuzzer::invokeCameraSound() {
+ mCameraService->increaseSoundRef();
+ mCameraService->decreaseSoundRef();
+ bool shouldPassInvalidPlaySound = mFuzzedDataProvider->ConsumeBool();
+ bool shouldPassInvalidLockSound = mFuzzedDataProvider->ConsumeBool();
+ android::CameraService::sound_kind playSound, lockSound;
+ if (shouldPassInvalidPlaySound) {
+ playSound = static_cast<android::CameraService::sound_kind>(
+ mFuzzedDataProvider->ConsumeIntegral<size_t>());
+ } else {
+ playSound =
+ kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+ }
+
+ if (shouldPassInvalidLockSound) {
+ lockSound = static_cast<android::CameraService::sound_kind>(
+ mFuzzedDataProvider->ConsumeIntegral<size_t>());
+ } else {
+ lockSound =
+ kSoundKind[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kNumSoundKind - 1)];
+ }
+ mCameraService->playSound(playSound);
+ mCameraService->loadSoundLocked(lockSound);
+}
+
+void CameraFuzzer::invokeDump() {
+ Vector<String16> args;
+ size_t numberOfLines = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, kMaxNumLines);
+ for (size_t lineIdx = 0; lineIdx < numberOfLines; ++lineIdx) {
+ args.add(static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+ }
+ const char *fileName = "logDumpFile";
+ int fd = memfd_create(fileName, MFD_ALLOW_SEALING);
+ mCameraService->dump(fd, args);
+ close(fd);
+}
+
+void CameraFuzzer::invokeShellCommand() {
+ int in = mFuzzedDataProvider->ConsumeIntegral<int>();
+ int out = mFuzzedDataProvider->ConsumeIntegral<int>();
+ int err = mFuzzedDataProvider->ConsumeIntegral<int>();
+ Vector<String16> args;
+ size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+ for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+ bool shouldPassInvalidCommand = mFuzzedDataProvider->ConsumeBool();
+ if (shouldPassInvalidCommand) {
+ args.add(
+ static_cast<String16>(mFuzzedDataProvider->ConsumeRandomLengthString().c_str()));
+ } else {
+ args.add(kShellCmd[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, kNumShellCmd - 1)]);
+ }
+ }
+ mCameraService->shellCommand(in, out, err, args);
+}
+
+void CameraFuzzer::invokeNotifyCalls() {
+ mCameraService->notifyMonitoredUids();
+ int64_t newState = mFuzzedDataProvider->ConsumeIntegral<int64_t>();
+ mCameraService->notifyDeviceStateChange(newState);
+ std::vector<int32_t> args;
+ size_t numArgs = mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(kMinArgs, kMaxArgs);
+ for (size_t argsIdx = 0; argsIdx < numArgs; ++argsIdx) {
+ args.push_back(mFuzzedDataProvider->ConsumeIntegral<int32_t>());
+ }
+ bool shouldPassInvalidEvent = mFuzzedDataProvider->ConsumeBool();
+ int32_t eventId;
+ if (shouldPassInvalidEvent) {
+ eventId = mFuzzedDataProvider->ConsumeIntegral<int32_t>();
+ } else {
+ eventId = kEventId[mFuzzedDataProvider->ConsumeBool()];
+ }
+ mCameraService->notifySystemEvent(eventId, args);
+}
+
+void CameraFuzzer::invokeCameraAPIs() {
+ for (int32_t cameraId = 0; cameraId < mNumCameras; ++cameraId) {
+ getCameraInformation(cameraId);
+
+ const String16 opPackageName("com.fuzzer.poc");
+ ::android::binder::Status rc;
+ sp<ICamera> cameraDevice;
+
+ rc = mCameraService->connect(this, cameraId, opPackageName, AID_MEDIA, AID_ROOT,
+ &cameraDevice);
+ if (!rc.isOk()) {
+ // camera not connected
+ return;
+ }
+ if (cameraDevice) {
+ sp<Surface> previewSurface;
+ sp<SurfaceControl> surfaceControl;
+ CameraParameters params(cameraDevice->getParameters());
+ String8 focusModes(params.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES));
+ bool isAFSupported = false;
+ const char *focusMode = nullptr;
+
+ if (focusModes.contains(CameraParameters::FOCUS_MODE_AUTO)) {
+ isAFSupported = true;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO;
+ } else if (focusModes.contains(CameraParameters::FOCUS_MODE_MACRO)) {
+ isAFSupported = true;
+ focusMode = CameraParameters::FOCUS_MODE_MACRO;
+ }
+ if (nullptr != focusMode) {
+ params.set(CameraParameters::KEY_FOCUS_MODE, focusMode);
+ cameraDevice->setParameters(params.flatten());
+ }
+ int previewWidth, previewHeight;
+ params.getPreviewSize(&previewWidth, &previewHeight);
+
+ mComposerClient = new SurfaceComposerClient;
+ mComposerClient->initCheck();
+
+ bool shouldPassInvalidLayerMetaData = mFuzzedDataProvider->ConsumeBool();
+ int layerMetaData;
+ if (shouldPassInvalidLayerMetaData) {
+ layerMetaData = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ layerMetaData = kLayerMetadata[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, kNumLayerMetaData - 1)];
+ }
+ surfaceControl = mComposerClient->createSurface(
+ String8("Test Surface"), previewWidth, previewHeight,
+ CameraParameters::previewFormatToEnum(params.getPreviewFormat()), layerMetaData);
+
+ if (surfaceControl.get() != nullptr) {
+ SurfaceComposerClient::Transaction{}
+ .setLayer(surfaceControl, 0x7fffffff)
+ .show(surfaceControl)
+ .apply();
+
+ previewSurface = surfaceControl->getSurface();
+ cameraDevice->setPreviewTarget(previewSurface->getIGraphicBufferProducer());
+ }
+ cameraDevice->setPreviewCallbackFlag(CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER);
+
+ Vector<Size> pictureSizes;
+ params.getSupportedPictureSizes(pictureSizes);
+
+ for (size_t i = 0; i < pictureSizes.size(); ++i) {
+ params.setPictureSize(pictureSizes[i].width, pictureSizes[i].height);
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->autoFocus();
+ waitForEvent(mAutoFocusLock, mAutoFocusCondition, mAutoFocusMessage);
+ bool shouldPassInvalidCameraMsg = mFuzzedDataProvider->ConsumeBool();
+ int msgType;
+ if (shouldPassInvalidCameraMsg) {
+ msgType = mFuzzedDataProvider->ConsumeIntegral<int>();
+ } else {
+ msgType = kCameraMsg[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(
+ 0, kNumCameraMsg - 1)];
+ }
+ cameraDevice->takePicture(msgType);
+
+ waitForEvent(mSnapshotLock, mSnapshotCondition, mSnapshotNotification);
+ }
+
+ Vector<Size> videoSizes;
+ params.getSupportedVideoSizes(videoSizes);
+
+ for (size_t i = 0; i < videoSizes.size(); ++i) {
+ params.setVideoSize(videoSizes[i].width, videoSizes[i].height);
+
+ cameraDevice->setParameters(params.flatten());
+ cameraDevice->startPreview();
+ waitForPreviewStart();
+ cameraDevice->setVideoBufferMode(
+ android::hardware::BnCamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
+ cameraDevice->setVideoTarget(previewSurface->getIGraphicBufferProducer());
+ cameraDevice->startRecording();
+ cameraDevice->stopRecording();
+ }
+ cameraDevice->stopPreview();
+ cameraDevice->disconnect();
+ }
+ }
+}
+
+void CameraFuzzer::process(const uint8_t *data, size_t size) {
+ mFuzzedDataProvider = new FuzzedDataProvider(data, size);
+ getNumCameras();
+ invokeCameraSound();
+ if (mNumCameras > 0) {
+ invokeCameraAPIs();
+ }
+ invokeDump();
+ invokeShellCommand();
+ invokeNotifyCalls();
+ delete mFuzzedDataProvider;
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+ if (size < 1) {
+ return 0;
+ }
+ sp<CameraFuzzer> camerafuzzer = new CameraFuzzer();
+ if (!camerafuzzer) {
+ return 0;
+ }
+ if (camerafuzzer->init()) {
+ camerafuzzer->process(data, size);
+ }
+ return 0;
+}