Merge "Revert "CCodec: fix dataspace configured for the input surfaces"" into sc-dev
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index abd8b2d..74b099c 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -46,8 +46,8 @@
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
"bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
- {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
- {"audio/mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
+ {"mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3", "bbb_mp3_stereo_192kbps_48000hz.info"},
+ {"mpeg", "bbb_mp3_stereo_192kbps_48000hz.mp3",
"bbb_mp3_stereo_192kbps_48000hz_multi_frame.info"},
{"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb", "sine_amrnb_1ch_12kbps_8000hz.info"},
{"3gpp", "sine_amrnb_1ch_12kbps_8000hz.amrnb",
@@ -303,7 +303,7 @@
int streamCount = 0;
for (size_t i = 0; i < gCompToFiles.size(); ++i) {
- if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+ if (!mMime.compare("audio/" + gCompToFiles[i].mime)) {
if (streamCount == streamIndex) {
mInputFile = sResourceDir + gCompToFiles[i].inputFile;
mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
index f211ecf..ecc4f9d 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
@@ -36,6 +36,8 @@
"libgui",
"libutils",
"libcrypto",
+ "libdatasource",
+ "libui",
],
data: [":media_c2_v1_video_decode_res"],
test_config: "VtsHalMediaC2V1_0TargetVideoDecTest.xml",
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index c331d0b..95a4674 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -33,11 +33,18 @@
#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>
#include <system/window.h>
+#include <gui/GLConsumer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
-using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+constexpr size_t kSmoothnessFactor = 4;
+constexpr size_t kRenderingDepth = 3;
+enum surfaceMode_t { NO_SURFACE, NULL_SURFACE, SURFACE };
+
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool, surfaceMode_t>;
static std::vector<DecodeTestParameters> gDecodeTestParameters;
using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
@@ -64,13 +71,15 @@
{"3gpp", "bbb_h263_352x288_300kbps_12fps.h263", "bbb_h263_352x288_300kbps_12fps.info", ""},
{"mp4v-es", "bbb_mpeg4_352x288_512kbps_30fps.m4v", "bbb_mpeg4_352x288_512kbps_30fps.info",
""},
- {"vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info", ""},
- {"vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
+ {"x-vnd.on2.vp8", "bbb_vp8_176x144_240kbps_60fps.vp8", "bbb_vp8_176x144_240kbps_60fps.info",
+ ""},
+ {"x-vnd.on2.vp8", "bbb_vp8_640x360_2mbps_30fps.vp8", "bbb_vp8_640x360_2mbps_30fps.info",
"bbb_vp8_640x360_2mbps_30fps_chksm.md5"},
- {"vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info", ""},
- {"vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9", "bbb_vp9_640x360_1600kbps_30fps.info",
- "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
- {"vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
+ {"x-vnd.on2.vp9", "bbb_vp9_176x144_285kbps_60fps.vp9", "bbb_vp9_176x144_285kbps_60fps.info",
+ ""},
+ {"x-vnd.on2.vp9", "bbb_vp9_640x360_1600kbps_30fps.vp9",
+ "bbb_vp9_640x360_1600kbps_30fps.info", "bbb_vp9_640x360_1600kbps_30fps_chksm.md5"},
+ {"x-vnd.on2.vp9", "bbb_vp9_704x480_280kbps_24fps_altref_2.vp9",
"bbb_vp9_704x480_280kbps_24fps_altref_2.info", ""},
{"av01", "bbb_av1_640_360.av1", "bbb_av1_640_360.info", "bbb_av1_640_360_chksum.md5"},
{"av01", "bbb_av1_176_144.av1", "bbb_av1_176_144.info", "bbb_av1_176_144_chksm.md5"},
@@ -379,7 +388,7 @@
int streamCount = 0;
for (size_t i = 0; i < gCompToFiles.size(); ++i) {
- if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
+ if (!mMime.compare("video/" + gCompToFiles[i].mime)) {
if (streamCount == streamIndex) {
mInputFile = sResourceDir + gCompToFiles[i].inputFile;
mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
@@ -392,6 +401,36 @@
return false;
}
+void setOutputSurface(const std::shared_ptr<android::Codec2Client::Component>& component,
+ surfaceMode_t surfMode) {
+ using namespace android;
+ sp<IGraphicBufferProducer> producer = nullptr;
+ static std::atomic_uint32_t surfaceGeneration{0};
+ uint32_t generation =
+ (getpid() << 10) |
+ ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) & ((1 << 10) - 1));
+ int32_t maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+ if (surfMode == SURFACE) {
+ sp<IGraphicBufferConsumer> consumer = nullptr;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
+ ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
+
+ sp<GLConsumer> texture =
+ new GLConsumer(consumer, 0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+ true /* useFenceSync */, false /* isControlledByApp */);
+
+ sp<ANativeWindow> gSurface = new Surface(producer);
+ ASSERT_NE(gSurface, nullptr) << "getSurface failed";
+
+ producer->setGenerationNumber(generation);
+ }
+
+ c2_status_t err = component->setOutputSurface(C2BlockPool::BASIC_GRAPHIC, producer, generation,
+ maxDequeueBuffers);
+ ASSERT_EQ(err, C2_OK) << "setOutputSurface failed";
+}
+
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
std::mutex& queueLock, std::condition_variable& queueCondition,
std::list<std::unique_ptr<C2Work>>& workQueue,
@@ -550,6 +589,7 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
bool signalEOS = std::get<3>(GetParam());
+ surfaceMode_t surfMode = std::get<4>(GetParam());
mTimestampDevTest = true;
android::Vector<FrameInfo> Info;
@@ -594,6 +634,10 @@
refChksum.close();
}
+ if (surfMode != NO_SURFACE) {
+ ASSERT_NO_FATAL_FAILURE(setOutputSurface(mComponent, surfMode));
+ }
+
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
(int)Info.size(), signalEOS));
@@ -1061,18 +1105,23 @@
parseArgs(argc, argv);
gTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
for (auto params : gTestParameters) {
+ // mOutputBufferQueue->configure() crashes when surface is NULL
+ std::initializer_list<surfaceMode_t> surfaceMode = {
+ surfaceMode_t::NO_SURFACE, surfaceMode_t::NULL_SURFACE, surfaceMode_t::SURFACE};
+ for (surfaceMode_t mode : surfaceMode) {
+ gDecodeTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false, mode));
+ gDecodeTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true, mode));
+ }
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
- gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
- gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true, NO_SURFACE));
gCsdFlushTestParameters.push_back(
std::make_tuple(std::get<0>(params), std::get<1>(params), true));
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index 8cd4934..de34c24 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -181,7 +181,7 @@
int maxDequeueBufferCount,
std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
uint64_t consumerUsage = 0;
- if (igbp->getConsumerUsage(&consumerUsage) != OK) {
+ if (igbp && igbp->getConsumerUsage(&consumerUsage) != OK) {
ALOGW("failed to get consumer usage");
}
@@ -254,6 +254,9 @@
mBqId = bqId;
mOwner = std::make_shared<int>(0);
mMaxDequeueBufferCount = maxDequeueBufferCount;
+ if (igbp == nullptr) {
+ return false;
+ }
for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
if (mBqId == 0 || !mBuffers[i]) {
continue;
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 691bab1..4070478 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -491,7 +491,7 @@
* align(mHeight, 64) / plane.rowSampling;
}
- if ((maxPtr - minPtr + 1) <= planeSize) {
+ if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
// FIXME: this is risky as reading/writing data out of bound results
// in an undefined behavior, but gralloc does assume a
// contiguous mapping
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 88e752b..f1eeaa3 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -1922,6 +1922,14 @@
return statusTFromBinderStatus(aps->setAssistantUid(uidAidl));
}
+status_t AudioSystem::setHotwordDetectionServiceUid(uid_t uid) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+
+ int32_t uidAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(uid));
+ return statusTFromBinderStatus(aps->setHotwordDetectionServiceUid(uidAidl));
+}
+
status_t AudioSystem::setA11yServicesUids(const std::vector<uid_t>& uids) {
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index e46b349..cae81f0 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -665,7 +665,11 @@
status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
audio_patch_handle_t* handle) {
media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
- int32_t aidlRet;
+ int32_t aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(
+ AUDIO_PATCH_HANDLE_NONE));
+ if (handle != nullptr) {
+ aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
+ }
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mDelegate->createAudioPatch(patchAidl, &aidlRet)));
if (handle != nullptr) {
@@ -1140,7 +1144,8 @@
Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
int32_t* _aidl_return) {
audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
- audio_patch_handle_t handleLegacy;
+ audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_patch_handle_t(*_aidl_return));
RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
*_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_patch_handle_t_int32_t(handleLegacy));
return Status::ok();
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 65bcd82..4c3955a 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -299,6 +299,8 @@
void setAssistantUid(int /* uid_t */ uid);
+ void setHotwordDetectionServiceUid(int /* uid_t */ uid);
+
void setA11yServicesUids(in int[] /* uid_t[] */ uids);
void setCurrentImeUid(int /* uid_t */ uid);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index a9109c8..8ba23ad 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -429,6 +429,7 @@
static status_t setSurroundFormatEnabled(audio_format_t audioFormat, bool enabled);
static status_t setAssistantUid(uid_t uid);
+ static status_t setHotwordDetectionServiceUid(uid_t uid);
static status_t setA11yServicesUids(const std::vector<uid_t>& uids);
static status_t setCurrentImeUid(uid_t uid);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index d94cecf..9ae7ddb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2856,10 +2856,43 @@
CHECK(msg->findInt32("payload-type", &payloadType));
+ int32_t rtpSeq = 0, rtpTime = 0;
+ int64_t ntpTime = 0, recvTimeUs = 0;
+
Parcel in;
in.writeInt32(payloadType);
switch (payloadType) {
+ case ARTPSource::RTP_FIRST_PACKET:
+ {
+ CHECK(msg->findInt32("rtp-time", &rtpTime));
+ CHECK(msg->findInt32("rtp-seq-num", &rtpSeq));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(rtpTime);
+ in.writeInt32(rtpSeq);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
+ case ARTPSource::RTCP_FIRST_PACKET:
+ {
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
+ case ARTPSource::RTCP_SR:
+ {
+ CHECK(msg->findInt32("rtp-time", &rtpTime));
+ CHECK(msg->findInt64("ntp-time", &ntpTime));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(rtpTime);
+ in.writeInt32(ntpTime >> 32);
+ in.writeInt32(ntpTime & 0xFFFFFFFF);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
case ARTPSource::RTCP_TSFB: // RTCP TSFB
case ARTPSource::RTCP_PSFB: // RTCP PSFB
case ARTPSource::RTP_AUTODOWN:
@@ -2882,6 +2915,8 @@
int32_t feedbackType, bitrate;
int32_t highestSeqNum, baseSeqNum, prevExpected;
int32_t numBufRecv, prevNumBufRecv;
+ int32_t latestRtpTime, jbTimeMs, rtpRtcpSrTimeGapMs;
+ int64_t recvTimeUs;
CHECK(msg->findInt32("feedback-type", &feedbackType));
CHECK(msg->findInt32("bit-rate", &bitrate));
CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
@@ -2889,6 +2924,10 @@
CHECK(msg->findInt32("prev-expected", &prevExpected));
CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+ CHECK(msg->findInt32("latest-rtp-time", &latestRtpTime));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ CHECK(msg->findInt32("rtp-jitter-time-ms", &jbTimeMs));
+ CHECK(msg->findInt32("rtp-rtcpsr-time-gap-ms", &rtpRtcpSrTimeGapMs));
in.writeInt32(feedbackType);
in.writeInt32(bitrate);
in.writeInt32(highestSeqNum);
@@ -2896,6 +2935,11 @@
in.writeInt32(prevExpected);
in.writeInt32(numBufRecv);
in.writeInt32(prevNumBufRecv);
+ in.writeInt32(latestRtpTime);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ in.writeInt32(jbTimeMs);
+ in.writeInt32(rtpRtcpSrTimeGapMs);
break;
}
case ARTPSource::RTP_CVO:
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index d2d978a..4d6a483 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -395,23 +395,13 @@
CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
onTimeUpdate(trackIndex, rtpTime, ntpTime);
- break;
- }
-
- int32_t firstRTCP;
- if (msg->findInt32("first-rtcp", &firstRTCP)) {
- // There won't be an access unit here, it's just a notification
- // that the data communication worked since we got the first
- // rtcp packet.
- ALOGV("first-rtcp");
- break;
}
int32_t IMSRxNotice;
if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
- int32_t payloadType, feedbackType;
+ int32_t payloadType = 0, feedbackType = 0;
CHECK(msg->findInt32("payload-type", &payloadType));
- CHECK(msg->findInt32("feedback-type", &feedbackType));
+ msg->findInt32("feedback-type", &feedbackType);
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatIMSRxNotice);
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 7272a74..d21908f 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -50,6 +50,8 @@
static constexpr int32_t kDefaultBitrateMbps = 10 * 1000 * 1000;
// Default frame rate.
static constexpr int32_t kDefaultFrameRate = 30;
+// Default codec complexity
+static constexpr int32_t kDefaultCodecComplexity = 1;
template <typename T>
void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
@@ -247,6 +249,7 @@
SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_PRIORITY, encoderFormat, kDefaultCodecPriority);
SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, encoderFormat, kDefaultFrameRate);
+ SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_COMPLEXITY, encoderFormat, kDefaultCodecComplexity);
AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
// Always encode without rotation. The rotation degree will be transferred directly to
diff --git a/media/libstagefright/CodecBase.cpp b/media/libstagefright/CodecBase.cpp
index 5b724aa..b9fb041 100644
--- a/media/libstagefright/CodecBase.cpp
+++ b/media/libstagefright/CodecBase.cpp
@@ -40,4 +40,31 @@
buf->size = size;
}
+status_t CodecBase::querySupportedParameters(std::vector<std::string> *names) {
+ if (names == nullptr) {
+ return BAD_VALUE;
+ }
+ names->clear();
+ return OK;
+}
+
+status_t CodecBase::describeParameter(const std::string &, CodecParameterDescriptor *) {
+ return ERROR_UNSUPPORTED;
+}
+
+status_t CodecBase::subscribeToParameters(const std::vector<std::string> &names) {
+ if (names.empty()) {
+ return OK;
+ }
+ return ERROR_UNSUPPORTED;
+}
+
+status_t CodecBase::unsubscribeFromParameters(const std::vector<std::string> &names) {
+ if (names.empty()) {
+ return OK;
+ }
+ return ERROR_UNSUPPORTED;
+}
+
+
} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index efb2f86..48721ec 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -252,9 +252,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t querySupportedParameters(std::vector<std::string> *names);
/**
* Fill |desc| with description of the parameter with |name|.
*
@@ -267,10 +265,8 @@
* ERROR_UNSUPPORTED if not supported.
*/
virtual status_t describeParameter(
- [[maybe_unused]] const std::string &name,
- [[maybe_unused]] CodecParameterDescriptor *desc) {
- return ERROR_UNSUPPORTED;
- }
+ const std::string &name,
+ CodecParameterDescriptor *desc);
/**
* Subscribe to parameters in |names| and get output format change event
* when they change.
@@ -281,10 +277,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t subscribeToParameters(
- [[maybe_unused]] const std::vector<std::string> &names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t subscribeToParameters(const std::vector<std::string> &names);
/**
* Unsubscribe from parameters in |names| and no longer get
* output format change event when they change.
@@ -295,10 +288,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t unsubscribeFromParameters(
- [[maybe_unused]] const std::vector<std::string> &names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names);
typedef CodecBase *(*CreateCodecFunc)(void);
typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index e1cc5ec..3f4d662 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -44,6 +44,7 @@
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
+ mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
mLastRtpTimeJitterDataUs(0),
mWidth(0),
@@ -137,7 +138,7 @@
}
source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -195,33 +196,38 @@
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
+ // set an alarm for jitter buffer time expiration.
+    // adding 1ms because the jitter buffer time keeps changing.
+ int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+ source->setJbAlarmTime(nowTimeUs, expTimeUs);
return NOT_ENOUGH_DATA;
}
if (isFirstLineBroken) {
- if (isSecondLineBroken) {
- int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
- ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ String8 info;
+ info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
"JitterMs %d + (%d + %d * %.3f)",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
buffer->int32Data(), mNextExpectedSeqNo,
jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ if (isSecondLineBroken) {
+ ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
} else {
- ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
- jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ ALOGW("%s", info.string());
}
}
if (mNextExpectedSeqNoValid) {
- int32_t size = queue->size();
+ mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
if (cntRemove > 0) {
+ int32_t size = queue->size();
source->noticeAbandonBuffer(cntRemove);
ALOGW("delete %d of %d buffers", cntRemove, size);
}
@@ -441,7 +447,6 @@
uint32_t rtpTimeStartAt;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
uint32_t startSeqNo = buffer->int32Data();
- bool pFrame = nalType == 0x1;
if (data[1] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -451,8 +456,6 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
- int32_t connected = 1;
- bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -463,33 +466,26 @@
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
- expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
- snapped = true;
-
- if (!pFrame) {
- return WRONG_SEQUENCE_NUMBER;
- }
- }
-
- if (!snapped) {
- connected++;
+ expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
}
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- if (size < 2
- || data[0] != indicator
+ if (size < 2) {
+ ALOGV("Ignoring malformed FU buffer.");
+ it = queue->erase(it);
+ continue;
+ }
+ if (data[0] != indicator
|| (data[1] & 0x1f) != nalType
|| (data[1] & 0x80)
|| rtpTime != rtpTimeStartAt) {
- ALOGV("Ignoring malformed FU buffer.");
-
- // Delete the whole start of the FU.
-
- mNextExpectedSeqNo = expectedSeqNo + 1;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
+                // The assembler has already allowed enough time via the jitter buffer
+ ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+ startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+ expectedSeqNo = (uint32_t)buffer->int32Data();
+ complete = true;
+ break;
}
totalSize += size - 2;
@@ -498,14 +494,6 @@
expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[1] & 0x40) {
- if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
- connected, totalCount, 0.5f)) {
- mNextExpectedSeqNo = expectedSeqNo;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
- }
-
// This is the last fragment.
complete = true;
break;
@@ -557,6 +545,9 @@
if (cvo >= 0) {
unit->meta()->setInt32("cvo", cvo);
+ mLastCvo = cvo;
+ } else if (mLastCvo >= 0) {
+ unit->meta()->setInt32("cvo", mLastCvo);
}
if (source != nullptr) {
unit->meta()->setObject("source", source);
@@ -621,35 +612,32 @@
msg->post();
}
-int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AAVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+    // pick the first sequence number that has the start bit.
sp<ABuffer> buffer = *(queue->begin());
- int32_t nextSeqNo = buffer->int32Data();
+ int32_t firstSeqNo = buffer->int32Data();
- Queue::const_iterator it = queue->begin();
- while (it != queue->end()) {
- int64_t rtpTime = findRTPTime(first, *it);
- // if pkt in time exists, that should be the next pivot
+ // This only works for FU-A type & non-start sequence
+ unsigned nalType = buffer->data()[0] & 0x1f;
+ if (nalType != 28 || buffer->data()[1] & 0x80) {
+ return firstSeqNo;
+ }
+
+ for (auto it : *queue) {
+ const uint8_t *data = it->data();
+ int64_t rtpTime = findRTPTime(first, it);
if (rtpTime + jit >= play) {
- nextSeqNo = (*it)->int32Data();
break;
}
- it++;
+ if ((data[1] & 0x80)) {
+ const int32_t seqNo = it->int32Data();
+ ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+ firstSeqNo = seqNo;
+ break;
+ }
}
- return nextSeqNo;
-}
-
-bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio) {
- float total = end - start;
- float valid = connected;
- float exist = avail;
- bool isRecycle = (valid / total) >= goodRatio;
-
- ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
- exist, valid, total, isRecycle);
-
- return isRecycle;
+ return firstSeqNo;
}
int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 8d19773..2f8b8ba 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -22,6 +22,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
+#include <utils/String8.h>
namespace android {
@@ -47,6 +48,7 @@
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
+ int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
@@ -64,9 +66,7 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
- bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio);
+ int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index d32e85d..b240339 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -51,6 +51,7 @@
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
+ mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
mLastRtpTimeJitterDataUs(0),
mWidth(0),
@@ -147,7 +148,7 @@
}
source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -205,33 +206,38 @@
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
+ // set an alarm for jitter buffer time expiration.
+    // adding 1ms because the jitter buffer time keeps changing.
+ int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+ source->setJbAlarmTime(nowTimeUs, expTimeUs);
return NOT_ENOUGH_DATA;
}
if (isFirstLineBroken) {
- if (isSecondLineBroken) {
- int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
- ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ String8 info;
+ info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
"JitterMs %d + (%d + %d * %.3f)",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
buffer->int32Data(), mNextExpectedSeqNo,
jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ if (isSecondLineBroken) {
+ ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
} else {
- ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
- jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ ALOGW("%s", info.string());
}
}
if (mNextExpectedSeqNoValid) {
- int32_t size = queue->size();
+ mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
if (cntRemove > 0) {
+ int32_t size = queue->size();
source->noticeAbandonBuffer(cntRemove);
ALOGW("delete %d of %d buffers", cntRemove, size);
}
@@ -466,7 +472,6 @@
uint32_t rtpTimeStartAt;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
uint32_t startSeqNo = buffer->int32Data();
- bool pFrame = (nalType < 0x10);
if (data[2] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -476,8 +481,6 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
- int32_t connected = 1;
- bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -488,33 +491,26 @@
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
- expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
- snapped = true;
-
- if (!pFrame) {
- return WRONG_SEQUENCE_NUMBER;
- }
- }
-
- if (!snapped) {
- connected++;
+ expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
}
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- if (size < 3
- || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+ if (size < 3) {
+ ALOGV("Ignoring malformed FU buffer.");
+ it = queue->erase(it);
+ continue;
+ }
+ if (((data[0] >> 1) & H265_NALU_MASK) != indicator
|| (data[2] & H265_NALU_MASK) != nalType
|| (data[2] & 0x80)
|| rtpTime != rtpTimeStartAt) {
- ALOGV("Ignoring malformed FU buffer.");
-
- // Delete the whole start of the FU.
-
- mNextExpectedSeqNo = expectedSeqNo + 1;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
+                // The assembler has already allowed enough time via the jitter buffer
+ ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+ startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+ expectedSeqNo = (uint32_t)buffer->int32Data();
+ complete = true;
+ break;
}
totalSize += size - 3;
@@ -523,13 +519,6 @@
expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[2] & 0x40) {
- if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
- connected, totalCount, 0.5f)) {
- mNextExpectedSeqNo = expectedSeqNo;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
- }
// This is the last fragment.
complete = true;
break;
@@ -579,6 +568,9 @@
if (cvo >= 0) {
unit->meta()->setInt32("cvo", cvo);
+ mLastCvo = cvo;
+ } else if (mLastCvo >= 0) {
+ unit->meta()->setInt32("cvo", mLastCvo);
}
addSingleNALUnit(unit);
@@ -635,35 +627,32 @@
msg->post();
}
-int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ // Pick the first sequence number that has the start bit set.
sp<ABuffer> buffer = *(queue->begin());
- int32_t nextSeqNo = buffer->int32Data();
+ int32_t firstSeqNo = buffer->int32Data();
- Queue::const_iterator it = queue->begin();
- while (it != queue->end()) {
- int64_t rtpTime = findRTPTime(first, *it);
- // if pkt in time exists, that should be the next pivot
+ // This only works for FU-A type & non-start sequence
+ unsigned nalType = buffer->data()[0] & 0x1f;
+ if (nalType != 28 || buffer->data()[2] & 0x80) {
+ return firstSeqNo;
+ }
+
+ for (auto it : *queue) {
+ const uint8_t *data = it->data();
+ int64_t rtpTime = findRTPTime(first, it);
if (rtpTime + jit >= play) {
- nextSeqNo = (*it)->int32Data();
break;
}
- it++;
+ if ((data[2] & 0x80)) {
+ const int32_t seqNo = it->int32Data();
+ ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+ firstSeqNo = seqNo;
+ break;
+ }
}
- return nextSeqNo;
-}
-
-bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio) {
- float total = end - start;
- float valid = connected;
- float exist = avail;
- bool isRecycle = (valid / total) >= goodRatio;
-
- ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
- exist, valid, total, isRecycle);
-
- return isRecycle;
+ return firstSeqNo;
}
int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index 68777a7..9575d8c 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -22,6 +22,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
+#include <utils/String8.h>
namespace android {
@@ -48,6 +49,7 @@
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
+ int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
@@ -65,9 +67,7 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
- bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio);
+ int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index a4da433..ffccbb1 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,9 +18,7 @@
#define LOG_TAG "ARTPConnection"
#include <utils/Log.h>
-#include "ARTPAssembler.h"
#include "ARTPConnection.h"
-
#include "ARTPSource.h"
#include "ASessionDescription.h"
@@ -306,6 +304,12 @@
break;
}
+ case kWhatAlarmStream:
+ {
+ onAlarmStream(msg);
+ break;
+ }
+
case kWhatInjectPacket:
{
onInjectPacket(msg);
@@ -463,14 +467,16 @@
if (err == -ECONNRESET) {
// socket failure, this stream is dead, Jim.
- sp<AMessage> notify = it->mNotifyMsg->dup();
- notify->setInt32("rtcp-event", 1);
- notify->setInt32("payload-type", 400);
- notify->setInt32("feedback-type", 1);
- notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
- notify->post();
+ for (size_t i = 0; i < it->mSources.size(); ++i) {
+ sp<AMessage> notify = it->mNotifyMsg->dup();
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", 400);
+ notify->setInt32("feedback-type", 1);
+ notify->setInt32("sender", it->mSources.valueAt(i)->getSelfID());
+ notify->post();
- ALOGW("failed to receive RTP/RTCP datagram.");
+ ALOGW("failed to receive RTP/RTCP datagram.");
+ }
it = mStreams.erase(it);
continue;
}
@@ -571,6 +577,13 @@
}
}
+void ARTPConnection::onAlarmStream(const sp<AMessage> msg) {
+ sp<ARTPSource> source = nullptr;
+ if (msg->findObject("source", (sp<android::RefBase>*)&source)) {
+ source->processRTPPacket();
+ }
+}
+
status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
ALOGV("receiving %s", receiveRTP ? "RTP" : "RTCP");
@@ -656,12 +669,6 @@
}
status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
- if (s->mNumRTPPacketsReceived++ == 0) {
- sp<AMessage> notify = s->mNotifyMsg->dup();
- notify->setInt32("first-rtp", true);
- notify->post();
- }
-
size_t size = buffer->size();
if (size < 12) {
@@ -743,9 +750,23 @@
meta->setInt32("cvo", cvoDegrees);
}
- buffer->setInt32Data(u16at(&data[2]));
+ int32_t seq = u16at(&data[2]);
+ buffer->setInt32Data(seq);
buffer->setRange(payloadOffset, size - payloadOffset);
+ if (s->mNumRTPPacketsReceived++ == 0) {
+ sp<AMessage> notify = s->mNotifyMsg->dup();
+ notify->setInt32("first-rtp", true);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTP_FIRST_PACKET);
+ notify->setInt32("rtp-time", (int32_t)rtpTime);
+ notify->setInt32("rtp-seq-num", seq);
+ notify->setInt64("recv-time-us", ALooper::GetNowUs());
+ notify->post();
+
+ ALOGD("send first-rtp event to upper layer");
+ }
+
source->processRTPPacket(buffer);
return OK;
@@ -802,14 +823,12 @@
if (s->mNumRTCPPacketsReceived++ == 0) {
sp<AMessage> notify = s->mNotifyMsg->dup();
notify->setInt32("first-rtcp", true);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTCP_FIRST_PACKET);
+ notify->setInt64("recv-time-us", ALooper::GetNowUs());
notify->post();
- ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
- sp<AMessage> imsNotify = s->mNotifyMsg->dup();
- imsNotify->setInt32("rtcp-event", 1);
- imsNotify->setInt32("payload-type", 101);
- imsNotify->setInt32("feedback-type", 0);
- imsNotify->post();
+ ALOGD("send first-rtcp event to upper layer");
}
const uint8_t *data = buffer->data();
@@ -906,7 +925,7 @@
int64_t nowUs = ALooper::GetNowUs();
int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
- source->notifyPktInfo(bitrate, true /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
source->byeReceived();
@@ -1088,11 +1107,14 @@
srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
if (mFlags & kViLTEConnection) {
+ setStaticJitterTimeMs(50);
source->setPeriodicFIR(false);
}
source->setSelfID(mSelfID);
source->setStaticJitterTimeMs(mStaticJitterTimeMs);
+ sp<AMessage> timer = new AMessage(kWhatAlarmStream, this);
+ source->setJbTimer(timer);
info->mSources.add(srcId, source);
} else {
source = info->mSources.valueAt(index);
@@ -1140,7 +1162,7 @@
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
if (source->isNeedToEarlyNotify()) {
- source->notifyPktInfo(bitrate, false /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, false /* isRegular */);
mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
}
}
@@ -1171,7 +1193,7 @@
buffer->setRange(0, 0);
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
- source->notifyPktInfo(bitrate, true /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
}
++it;
}
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index adf9670..36cca31 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -73,6 +73,7 @@
kWhatRemoveStream,
kWhatPollStreams,
kWhatInjectPacket,
+ kWhatAlarmStream,
};
static const int64_t kSelectTimeoutUs;
@@ -98,6 +99,7 @@
void onSeekStream(const sp<AMessage> &msg);
void onRemoveStream(const sp<AMessage> &msg);
void onPollStreams();
+ void onAlarmStream(const sp<AMessage> msg);
void onInjectPacket(const sp<AMessage> &msg);
void onSendReceiverReports();
void checkRxBitrate(int64_t nowUs);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index f960482..38a370b 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -44,10 +44,11 @@
uint32_t id,
const sp<ASessionDescription> &sessionDesc, size_t index,
const sp<AMessage> ¬ify)
- : mFirstSeqNumber(0),
- mFirstRtpTime(0),
+ : mFirstRtpTime(0),
mFirstSysTime(0),
mClockRate(0),
+ mSysAnchorTime(0),
+ mLastSysAnchorTimeUpdatedUs(0),
mFirstSsrc(0),
mHighestNackNumber(0),
mID(id),
@@ -58,9 +59,14 @@
mPrevNumBuffersReceived(0),
mPrevExpectedForRR(0),
mPrevNumBuffersReceivedForRR(0),
+ mLatestRtpTime(0),
mStaticJbTimeMs(kStaticJitterTimeMs),
- mLastNTPTime(0),
- mLastNTPTimeUpdateUs(0),
+ mLastSrRtpTime(0),
+ mLastSrNtpTime(0),
+ mLastSrUpdateTimeUs(0),
+ mIsFirstRtpRtcpGap(true),
+ mAvgRtpRtcpGapMs(0),
+ mAvgUnderlineDelayMs(0),
mIssueFIRRequests(false),
mIssueFIRByAssembler(false),
mLastFIRRequestUs(-1),
@@ -106,6 +112,7 @@
int32_t clockRate, numChannels;
ASessionDescription::ParseFormatDesc(desc.c_str(), &clockRate, &numChannels);
mClockRate = clockRate;
+ mLastJbAlarmTimeUs = 0;
mJitterCalc = new JitterCalc(mClockRate);
}
@@ -119,20 +126,32 @@
}
}
+void ARTPSource::processRTPPacket() {
+ if (mAssembler != NULL && !mQueue.empty()) {
+ mAssembler->onPacketReceived(this);
+ }
+}
+
void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
- mLastNTPTime = ntpTime;
- mLastNTPTimeUpdateUs = ALooper::GetNowUs();
+ mLastSrRtpTime = rtpTime;
+ mLastSrNtpTime = ntpTime;
+ mLastSrUpdateTimeUs = ALooper::GetNowUs();
sp<AMessage> notify = mNotify->dup();
notify->setInt32("time-update", true);
notify->setInt32("rtp-time", rtpTime);
notify->setInt64("ntp-time", ntpTime);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", RTCP_SR);
+ notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
notify->post();
}
void ARTPSource::timeReset() {
mFirstRtpTime = 0;
mFirstSysTime = 0;
+ mSysAnchorTime = 0;
+ mLastSysAnchorTimeUpdatedUs = 0;
mFirstSsrc = 0;
mHighestNackNumber = 0;
mHighestSeqNumber = 0;
@@ -142,25 +161,100 @@
mPrevNumBuffersReceived = 0;
mPrevExpectedForRR = 0;
mPrevNumBuffersReceivedForRR = 0;
- mLastNTPTime = 0;
- mLastNTPTimeUpdateUs = 0;
+ mLatestRtpTime = 0;
+ mLastSrRtpTime = 0;
+ mLastSrNtpTime = 0;
+ mLastSrUpdateTimeUs = 0;
+ mIsFirstRtpRtcpGap = true;
+ mAvgRtpRtcpGapMs = 0;
+ mAvgUnderlineDelayMs = 0;
mIssueFIRByAssembler = false;
mLastFIRRequestUs = -1;
}
-bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
- uint32_t seqNum = (uint32_t)buffer->int32Data();
+void ARTPSource::calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs) {
+ if (mLastSrUpdateTimeUs == 0) {
+ return;
+ }
- int32_t ssrc = 0;
+ int64_t elapsedMs = (nowUs - mLastSrUpdateTimeUs) / 1000;
+ int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+ int64_t anchorRtpTime = mLastSrRtpTime + elapsedRtpTime;
+ int64_t rtpTimeGap = anchorRtpTime - rtpTime;
+ // rtpTime cannot be later than its anchor time,
+ // because rtpTime (of an RTP packet) represents the frame's capture time and
+ // anchorRtpTime (of an RTCP SR packet) represents its RTP packetization time.
+ if (rtpTimeGap < 0 || rtpTimeGap > (mClockRate * 60)) {
+ // ignore invalid delay gap such as negative delay or later than 1 min.
+ return;
+ }
+
+ int64_t rtpTimeGapMs = (rtpTimeGap * 1000 / mClockRate);
+ if (mIsFirstRtpRtcpGap) {
+ mIsFirstRtpRtcpGap = false;
+ mAvgRtpRtcpGapMs = rtpTimeGapMs;
+ } else {
+ // This measures the average RTP timestamp distance between RTP and RTCP SR packets.
+ // The RTP timestamp of an RTP packet represents the raw frame's capture time.
+ // The RTP timestamp of an RTCP SR packet represents its packetization time.
+ // Therefore this value shows how much time it takes for a raw frame to be
+ // turned into an RTP packet after it was captured.
+ // It may be used to infer a/v sync and the sender's own delay for this media stream.
+ mAvgRtpRtcpGapMs = ((mAvgRtpRtcpGapMs * 15) + rtpTimeGapMs) / 16;
+ }
+}
+
+void ARTPSource::calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs) {
+ int64_t elapsedMs = (nowUs - mSysAnchorTime) / 1000;
+ int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+ int64_t expectedRtpTime = mFirstRtpTime + elapsedRtpTime;
+
+ int32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ int32_t delayMs = (expectedRtpTime - rtpTime) / (mClockRate / 1000);
+
+ mAvgUnderlineDelayMs = ((mAvgUnderlineDelayMs * 15) + delayMs) / 16;
+}
+
+void ARTPSource::adjustAnchorTimeIfRequired(int64_t nowUs) {
+ if (nowUs - mLastSysAnchorTimeUpdatedUs < 1000000L) {
+ return;
+ }
+
+ if (mAvgUnderlineDelayMs < -30) {
+ // Adjust the underlying delay by a quarter of the desired delay, step by step.
+ mSysAnchorTime += (int64_t)(mAvgUnderlineDelayMs * 1000 / 4);
+ ALOGD("anchor time updated: original(%lld), anchor(%lld), diffMs(%lld)",
+ (long long)mFirstSysTime, (long long)mSysAnchorTime,
+ (long long)(mFirstSysTime - mSysAnchorTime) / 1000);
+
+ mAvgUnderlineDelayMs = 0;
+ mLastSysAnchorTimeUpdatedUs = nowUs;
+
+ // Reset the jitter statistics since the anchor time was adjusted.
+ mJitterCalc->init(mFirstRtpTime, mSysAnchorTime, 0, mStaticJbTimeMs * 1000);
+ }
+}
+
+bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
+ int64_t nowUs = ALooper::GetNowUs();
+ uint32_t seqNum = (uint32_t)buffer->int32Data();
+ int32_t ssrc = 0, rtpTime = 0;
+
buffer->meta()->findInt32("ssrc", &ssrc);
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ mLatestRtpTime = rtpTime;
if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
- uint32_t firstRtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
- mFirstSysTime = ALooper::GetNowUs();
+ mFirstSysTime = nowUs;
+ mSysAnchorTime = nowUs;
+ mLastSysAnchorTimeUpdatedUs = nowUs;
mHighestSeqNumber = seqNum;
mBaseSeqNumber = seqNum;
- mFirstRtpTime = firstRtpTime;
+ mFirstRtpTime = rtpTime;
mFirstSsrc = ssrc;
ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
@@ -179,6 +273,10 @@
return false;
}
+ calcTimeGapRtpRtcp(buffer, nowUs);
+ calcUnderlineDelay(buffer, nowUs);
+ adjustAnchorTimeIfRequired(nowUs);
+
// Only the lower 16-bit of the sequence numbers are transmitted,
// derive the high-order bits by choosing the candidate closest
// to the highest sequence number (extended to 32 bits) received so far.
@@ -363,11 +461,11 @@
uint32_t LSR = 0;
uint32_t DLSR = 0;
- if (mLastNTPTime != 0) {
- LSR = (mLastNTPTime >> 16) & 0xffffffff;
+ if (mLastSrNtpTime != 0) {
+ LSR = (mLastSrNtpTime >> 16) & 0xffffffff;
DLSR = (uint32_t)
- ((ALooper::GetNowUs() - mLastNTPTimeUpdateUs) * 65536.0 / 1E6);
+ ((ALooper::GetNowUs() - mLastSrUpdateTimeUs) * 65536.0 / 1E6);
}
data[24] = LSR >> 24;
@@ -566,6 +664,35 @@
mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
}
+void ARTPSource::setJbTimer(const sp<AMessage> timer) {
+ mJbTimer = timer;
+}
+
+void ARTPSource::setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs) {
+ if (mJbTimer == NULL) {
+ return;
+ }
+ int64_t alarmTimeUs = nowTimeUs + alarmAfterUs;
+ bool alarm = false;
+ if (mLastJbAlarmTimeUs <= nowTimeUs) {
+ // No alarm is currently pending.
+ mLastJbAlarmTimeUs = nowTimeUs + alarmAfterUs;
+ alarm = true;
+ } else if (mLastJbAlarmTimeUs > alarmTimeUs + 5000L) {
+ // Bring the alarm forward if it is more than 5ms earlier than the pending one.
+ mLastJbAlarmTimeUs = alarmTimeUs;
+ alarm = true;
+ } else {
+ // Do not set an alarm if it is too close to the previous one.
+ }
+
+ if (alarm) {
+ sp<AMessage> notify = mJbTimer->dup();
+ notify->setObject("source", this);
+ notify->post(alarmAfterUs);
+ }
+}
+
bool ARTPSource::isNeedToEarlyNotify() {
uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
int32_t intervalExpectedInNow = expected - mPrevExpected;
@@ -576,7 +703,7 @@
return false;
}
-void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular) {
int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
sp<AMessage> notify = mNotify->dup();
@@ -590,6 +717,11 @@
notify->setInt32("prev-expected", mPrevExpected);
notify->setInt32("num-buf-recv", mNumBuffersReceived);
notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+ notify->setInt32("latest-rtp-time", mLatestRtpTime);
+ notify->setInt64("recv-time-us", nowUs);
+ notify->setInt32("rtp-jitter-time-ms",
+ std::max(getBaseJitterTimeMs(), getStaticJitterTimeMs()));
+ notify->setInt32("rtp-rtcpsr-time-gap-ms", (int32_t)mAvgRtpRtcpGapMs);
notify->post();
if (isRegular) {
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index 2d804d8..4984e91 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -31,7 +31,7 @@
namespace android {
-const uint32_t kStaticJitterTimeMs = 50; // 50ms
+const uint32_t kStaticJitterTimeMs = 100; // 100ms
struct ABuffer;
struct AMessage;
@@ -49,6 +49,8 @@
RTCP_FIRST_PACKET = 101,
RTP_QUALITY = 102,
RTP_QUALITY_EMC = 103,
+ RTCP_SR = 200,
+ RTCP_RR = 201,
RTCP_TSFB = 205,
RTCP_PSFB = 206,
RTP_CVO = 300,
@@ -56,6 +58,7 @@
};
void processRTPPacket(const sp<ABuffer> &buffer);
+ void processRTPPacket();
void timeReset();
void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
void byeReceived();
@@ -77,19 +80,23 @@
void setStaticJitterTimeMs(const uint32_t jbTimeMs);
void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
+ void setJbTimer(const sp<AMessage> timer);
+ void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
bool isNeedToEarlyNotify();
- void notifyPktInfo(int32_t bitrate, bool isRegular);
+ void notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular);
// FIR needs to be sent by missing packet or broken video image.
void onIssueFIRByAssembler();
void noticeAbandonBuffer(int cnt=1);
- int32_t mFirstSeqNumber;
uint32_t mFirstRtpTime;
int64_t mFirstSysTime;
int32_t mClockRate;
+ int64_t mSysAnchorTime;
+ int64_t mLastSysAnchorTimeUpdatedUs;
+
int32_t mFirstSsrc;
int32_t mHighestNackNumber;
@@ -104,11 +111,14 @@
uint32_t mPrevExpectedForRR;
int32_t mPrevNumBuffersReceivedForRR;
+ uint32_t mLatestRtpTime;
+
List<sp<ABuffer> > mQueue;
sp<ARTPAssembler> mAssembler;
int32_t mStaticJbTimeMs;
sp<JitterCalc> mJitterCalc;
+ sp<AMessage> mJbTimer;
typedef struct infoNACK {
uint16_t seqNum;
@@ -121,8 +131,14 @@
std::map<uint16_t, infoNACK> mNACKMap;
int getSeqNumToNACK(List<int>& list, int size);
- uint64_t mLastNTPTime;
- int64_t mLastNTPTimeUpdateUs;
+ uint32_t mLastSrRtpTime;
+ uint64_t mLastSrNtpTime;
+ int64_t mLastSrUpdateTimeUs;
+
+ bool mIsFirstRtpRtcpGap;
+ double mAvgRtpRtcpGapMs;
+ double mAvgUnderlineDelayMs;
+ int64_t mLastJbAlarmTimeUs;
bool mIssueFIRRequests;
bool mIssueFIRByAssembler;
@@ -131,6 +147,10 @@
sp<AMessage> mNotify;
+ void calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs);
+ void calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs);
+ void adjustAnchorTimeIfRequired(int64_t nowUs);
+
bool queuePacket(const sp<ABuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(ARTPSource);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 29e263d..11c7aeb 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -204,8 +204,6 @@
mRTPTimeBase = 0;
mNumRTPSent = 0;
mNumRTPOctetsSent = 0;
- mLastRTPTime = 0;
- mLastNTPTime = 0;
mOpponentID = 0;
mBitrate = 192000;
@@ -216,6 +214,7 @@
mRTPSockNetwork = 0;
mMode = INVALID;
+ mClockRate = 16000;
}
status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
@@ -265,15 +264,28 @@
updateSocketNetwork(sockNetwork);
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ // rfc6184: RTP Payload Format for H.264 Video
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
mMode = H264;
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ // rfc7798: RTP Payload Format for High Efficiency Video Coding (HEVC)
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
mMode = H265;
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
mMode = H263;
+ // rfc4629: RTP Payload Format for ITU-T Rec. H.263 Video
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
mMode = AMR_NB;
+ // rfc4867: RTP Payload Format ... (AMR) and (AMR-WB)
+ // The RTP clock rate in "a=rtpmap" MUST be 8000 for AMR and 16000 for AMR-WB
+ mClockRate = 8000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
mMode = AMR_WB;
+ mClockRate = 16000;
} else {
TRESPASS();
}
@@ -646,19 +658,27 @@
data[6] = (mSourceID >> 8) & 0xff;
data[7] = mSourceID & 0xff;
- data[8] = mLastNTPTime >> (64 - 8);
- data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
- data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
- data[11] = (mLastNTPTime >> 32) & 0xff;
- data[12] = (mLastNTPTime >> 24) & 0xff;
- data[13] = (mLastNTPTime >> 16) & 0xff;
- data[14] = (mLastNTPTime >> 8) & 0xff;
- data[15] = mLastNTPTime & 0xff;
+ uint64_t ntpTime = GetNowNTP();
+ data[8] = ntpTime >> (64 - 8);
+ data[9] = (ntpTime >> (64 - 16)) & 0xff;
+ data[10] = (ntpTime >> (64 - 24)) & 0xff;
+ data[11] = (ntpTime >> 32) & 0xff;
+ data[12] = (ntpTime >> 24) & 0xff;
+ data[13] = (ntpTime >> 16) & 0xff;
+ data[14] = (ntpTime >> 8) & 0xff;
+ data[15] = ntpTime & 0xff;
- data[16] = (mLastRTPTime >> 24) & 0xff;
- data[17] = (mLastRTPTime >> 16) & 0xff;
- data[18] = (mLastRTPTime >> 8) & 0xff;
- data[19] = mLastRTPTime & 0xff;
+ // A current rtpTime can be calculated from ALooper::GetNowUs().
+ // This is expecting a timestamp of raw frame from a media source is
+ // on the same time context across components in android media framework
+ // which can be queried by ALooper::GetNowUs().
+ // In other words, ALooper::GetNowUs() is on the same timeline as the time
+ // of kKeyTime in a MediaBufferBase
+ uint32_t rtpTime = getRtpTime(ALooper::GetNowUs());
+ data[16] = (rtpTime >> 24) & 0xff;
+ data[17] = (rtpTime >> 16) & 0xff;
+ data[18] = (rtpTime >> 8) & 0xff;
+ data[19] = rtpTime & 0xff;
data[20] = mNumRTPSent >> 24;
data[21] = (mNumRTPSent >> 16) & 0xff;
@@ -780,6 +800,13 @@
return (hi << 32) | lo;
}
+uint32_t ARTPWriter::getRtpTime(int64_t timeUs) {
+ int32_t clockPerMs = mClockRate / 1000;
+ int64_t rtpTime = mRTPTimeBase + (timeUs * clockPerMs / 1000LL);
+
+ return (uint32_t)rtpTime;
+}
+
void ARTPWriter::dumpSessionDesc() {
AString sdp;
sdp = "v=0\r\n";
@@ -981,7 +1008,7 @@
sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+ uint32_t rtpTime = getRtpTime(timeUs);
CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
@@ -1156,9 +1183,6 @@
offset += size;
}
}
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
@@ -1170,7 +1194,7 @@
sendSPSPPSIfIFrame(mediaBuf, timeUs);
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+ uint32_t rtpTime = getRtpTime(timeUs);
CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
@@ -1343,9 +1367,6 @@
offset += size;
}
}
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
@@ -1354,7 +1375,7 @@
int64_t timeUs;
CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+ uint32_t rtpTime = getRtpTime(timeUs);
const uint8_t *mediaData =
(const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
@@ -1405,9 +1426,6 @@
++mNumRTPSent;
mNumRTPOctetsSent += buffer->size() - 12;
}
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
@@ -1490,7 +1508,7 @@
int64_t timeUs;
CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
- uint32_t rtpTime = mRTPTimeBase + (timeUs / (isWide ? 250 : 125));
+ uint32_t rtpTime = getRtpTime(timeUs);
// hexdump(mediaData, mediaLength);
@@ -1564,9 +1582,6 @@
++mSeqNo;
++mNumRTPSent;
mNumRTPOctetsSent += buffer->size() - 12;
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 28d6ec5..2982cf6 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -108,14 +108,13 @@
MediaBufferBase *mSPSBuf;
MediaBufferBase *mPPSBuf;
+ uint32_t mClockRate;
uint32_t mSourceID;
uint32_t mPayloadType;
uint32_t mSeqNo;
uint32_t mRTPTimeBase;
uint32_t mNumRTPSent;
uint32_t mNumRTPOctetsSent;
- uint32_t mLastRTPTime;
- uint64_t mLastNTPTime;
uint32_t mOpponentID;
uint32_t mBitrate;
@@ -136,6 +135,7 @@
} mMode;
static uint64_t GetNowNTP();
+ uint32_t getRtpTime(int64_t timeUs);
void initState();
void onRead(const sp<AMessage> &msg);
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 79b192e..e25658f 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -35,7 +35,6 @@
"android.hardware.media.omx@1.0",
"libandroidicu",
"libfmq",
- "libbase",
"libbinder",
"libhidlbase",
"liblog",
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index dc1b9b8..58e2d2a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "mediaserver"
//#define LOG_NDEBUG 0
-#include <android-base/properties.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
@@ -43,12 +42,6 @@
ResourceManagerService::instantiate();
registerExtensions();
::android::hardware::configureRpcThreadpool(16, false);
-
- if (!android::base::GetBoolProperty("ro.config.low_ram", false)) {
- // Start the media.transcoding service if the device is not low ram
- // device.
- android::base::SetProperty("ctl.start", "media.transcoding");
- }
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
::android::hardware::joinRpcThreadpool();
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index b6d015c..c73c17d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -508,7 +508,7 @@
switch (commDeviceType) {
case AUDIO_DEVICE_OUT_BLE_HEADSET:
device = availableDevices.getDevice(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
break;
case AUDIO_DEVICE_OUT_SPEAKER:
device = availableDevices.getFirstExistingDevice({
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 9987252..77223b6 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1916,6 +1916,14 @@
return Status::ok();
}
+Status AudioPolicyService::setHotwordDetectionServiceUid(int32_t uidAidl)
+{
+ uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
+ Mutex::Autolock _l(mLock);
+ mUidPolicy->setHotwordDetectionServiceUid(uid);
+ return Status::ok();
+}
+
Status AudioPolicyService::setA11yServicesUids(const std::vector<int32_t>& uidsAidl)
{
size_t size = uidsAidl.size();
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 3b77ed8..7ed829c 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -199,6 +199,7 @@
binder::Status setSurroundFormatEnabled(media::audio::common::AudioFormat audioFormat,
bool enabled) override;
binder::Status setAssistantUid(int32_t uid) override;
+ binder::Status setHotwordDetectionServiceUid(int32_t uid) override;
binder::Status setA11yServicesUids(const std::vector<int32_t>& uids) override;
binder::Status setCurrentImeUid(int32_t uid) override;
binder::Status isHapticPlaybackSupported(bool* _aidl_return) override;
@@ -376,7 +377,8 @@
public:
explicit UidPolicy(wp<AudioPolicyService> service)
: mService(service), mObserverRegistered(false),
- mAssistantUid(0), mCurrentImeUid(0), mRttEnabled(false) {}
+ mAssistantUid(0), mHotwordDetectionServiceUid(0), mCurrentImeUid(0),
+ mRttEnabled(false) {}
void registerSelf();
void unregisterSelf();
@@ -386,8 +388,13 @@
bool isUidActive(uid_t uid);
int getUidState(uid_t uid);
- void setAssistantUid(uid_t uid) { mAssistantUid = uid; }
- bool isAssistantUid(uid_t uid) { return uid == mAssistantUid; }
+ void setAssistantUid(uid_t uid) { mAssistantUid = uid; };
+ void setHotwordDetectionServiceUid(uid_t uid) { mHotwordDetectionServiceUid = uid; }
+ bool isAssistantUid(uid_t uid) const {
+ // The HotwordDetectionService is part of the Assistant package but runs with a separate
+ // (isolated) uid, so we check for either uid here.
+ return uid == mAssistantUid || uid == mHotwordDetectionServiceUid;
+ }
void setA11yUids(const std::vector<uid_t>& uids) { mA11yUids.clear(); mA11yUids = uids; }
bool isA11yUid(uid_t uid);
bool isA11yOnTop();
@@ -423,6 +430,7 @@
std::unordered_map<uid_t, std::pair<bool, int>> mOverrideUids;
std::unordered_map<uid_t, std::pair<bool, int>> mCachedUids;
uid_t mAssistantUid = -1;
+ uid_t mHotwordDetectionServiceUid = -1;
std::vector<uid_t> mA11yUids;
uid_t mCurrentImeUid = -1;
bool mRttEnabled = false;
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index c28c24b..26562e0 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -83,6 +83,7 @@
"device3/Camera3OutputStreamInterface.cpp",
"device3/Camera3OutputUtils.cpp",
"device3/Camera3DeviceInjectionMethods.cpp",
+ "device3/UHRCropAndMeteringRegionMapper.cpp",
"gui/RingBufferConsumer.cpp",
"hidl/AidlCameraDeviceCallbacks.cpp",
"hidl/AidlCameraServiceListener.cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index dc101ff..d0d3a9d 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -1840,10 +1840,31 @@
}
// Set camera muting behavior
+ bool isCameraPrivacyEnabled =
+ mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
if (client->supportsCameraMute()) {
- bool isCameraPrivacyEnabled =
- mSensorPrivacyPolicy->isCameraPrivacyEnabled(multiuser_get_user_id(clientUid));
- client->setCameraMute(mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+ client->setCameraMute(
+ mOverrideCameraMuteMode || isCameraPrivacyEnabled);
+ } else if (isCameraPrivacyEnabled) {
+ // no camera mute supported, but privacy is on! => disconnect
+ ALOGI("Camera mute not supported for package: %s, camera id: %s",
+ String8(client->getPackageName()).string(), cameraId.string());
+ // Do not hold mServiceLock while disconnecting clients, but
+ // retain the condition blocking other clients from connecting
+ // in mServiceLockWrapper if held.
+ mServiceLock.unlock();
+ // Clear caller identity temporarily so client disconnect PID
+ // checks work correctly
+ int64_t token = CameraThreadState::clearCallingIdentity();
+ // Note AppOp to trigger the "Unblock" dialog
+ client->noteAppOp();
+ client->disconnect();
+ CameraThreadState::restoreCallingIdentity(token);
+ // Reacquire mServiceLock
+ mServiceLock.lock();
+
+ return STATUS_ERROR_FMT(ERROR_DISABLED,
+ "Camera \"%s\" disabled due to camera mute", cameraId.string());
}
if (shimUpdateOnly) {
@@ -3201,6 +3222,27 @@
return OK;
}
+status_t CameraService::BasicClient::noteAppOp() {
+ ATRACE_CALL();
+
+ ALOGV("%s: Start camera noteAppOp, package name = %s, client UID = %d",
+ __FUNCTION__, String8(mClientPackageName).string(), mClientUid);
+
+ // noteAppOp is only used for when camera mute is not supported, in order
+ // to trigger the sensor privacy "Unblock" dialog
+ if (mAppOpsManager != nullptr) {
+ int32_t mode = mAppOpsManager->noteOp(AppOpsManager::OP_CAMERA, mClientUid,
+ mClientPackageName, mClientFeatureId,
+ String16("start camera ") + String16(mCameraIdStr));
+ status_t res = handleAppOpMode(mode);
+ if (res != OK) {
+ return res;
+ }
+ }
+
+ return OK;
+}
+
status_t CameraService::BasicClient::finishCameraStreamingOps() {
ATRACE_CALL();
@@ -3293,10 +3335,13 @@
// If the calling Uid is trusted (a native service), or the client Uid is active (WAR for
// b/175320666), the AppOpsManager could return MODE_IGNORED. Do not treat such cases as
// error.
- if (!mUidIsTrusted && isUidActive && isCameraPrivacyEnabled) {
- setCameraMute(true);
- } else if (!mUidIsTrusted && !isUidActive) {
- block();
+ if (!mUidIsTrusted) {
+ if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
+ setCameraMute(true);
+ } else if (!isUidActive
+ || (isCameraPrivacyEnabled && !supportsCameraMute())) {
+ block();
+ }
}
} else if (res == AppOpsManager::MODE_ALLOWED) {
setCameraMute(sCameraService->mOverrideCameraMuteMode);
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 9021170..bc2e347 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -371,6 +371,9 @@
virtual status_t finishCameraOps();
// Handle errors for start/checkOps
virtual status_t handleAppOpMode(int32_t mode);
+ // Just notify camera appops to trigger unblocking dialog if sensor
+ // privacy is enabled and camera mute is not supported
+ virtual status_t noteAppOp();
std::unique_ptr<AppOpsManager> mAppOpsManager = nullptr;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0101c58..fd645c7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -171,6 +171,13 @@
mZoomRatioMappers[physicalId] = ZoomRatioMapper(
&mPhysicalDeviceInfoMap[physicalId],
mSupportNativeZoomRatio, usePrecorrectArray);
+
+ if (SessionConfigurationUtils::isUltraHighResolutionSensor(
+ mPhysicalDeviceInfoMap[physicalId])) {
+ mUHRCropAndMeteringRegionMappers[physicalId] =
+ UHRCropAndMeteringRegionMapper(mPhysicalDeviceInfoMap[physicalId],
+ usePrecorrectArray);
+ }
}
}
@@ -299,9 +306,25 @@
sessionParamKeys.insertArrayAt(sessionKeysEntry.data.i32, 0, sessionKeysEntry.count);
}
+ camera_metadata_entry_t availableTestPatternModes = mDeviceInfo.find(
+ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES);
+ for (size_t i = 0; i < availableTestPatternModes.count; i++) {
+ if (availableTestPatternModes.data.i32[i] ==
+ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
+ mSupportCameraMute = true;
+ mSupportTestPatternSolidColor = true;
+ break;
+ } else if (availableTestPatternModes.data.i32[i] ==
+ ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
+ mSupportCameraMute = true;
+ mSupportTestPatternSolidColor = false;
+ }
+ }
+
/** Start up request queue thread */
mRequestThread = new RequestThread(
- this, mStatusTracker, mInterface, sessionParamKeys, mUseHalBufManager);
+ this, mStatusTracker, mInterface, sessionParamKeys,
+ mUseHalBufManager, mSupportCameraMute);
res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
@@ -348,23 +371,13 @@
mZoomRatioMappers[mId.c_str()] = ZoomRatioMapper(&mDeviceInfo,
mSupportNativeZoomRatio, usePrecorrectArray);
- if (RotateAndCropMapper::isNeeded(&mDeviceInfo)) {
- mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
+ if (SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo)) {
+ mUHRCropAndMeteringRegionMappers[mId.c_str()] =
+ UHRCropAndMeteringRegionMapper(mDeviceInfo, usePrecorrectArray);
}
- camera_metadata_entry_t availableTestPatternModes = mDeviceInfo.find(
- ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES);
- for (size_t i = 0; i < availableTestPatternModes.count; i++) {
- if (availableTestPatternModes.data.i32[i] ==
- ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
- mSupportCameraMute = true;
- mSupportTestPatternSolidColor = true;
- break;
- } else if (availableTestPatternModes.data.i32[i] ==
- ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) {
- mSupportCameraMute = true;
- mSupportTestPatternSolidColor = false;
- }
+ if (RotateAndCropMapper::isNeeded(&mDeviceInfo)) {
+ mRotateAndCropMappers.emplace(mId.c_str(), &mDeviceInfo);
}
mInjectionMethods = new Camera3DeviceInjectionMethods(this);
@@ -2450,9 +2463,9 @@
auto testPatternDataEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
- if (testPatternDataEntry.count > 0) {
- memcpy(newRequest->mOriginalTestPatternData, testPatternModeEntry.data.i32,
- sizeof(newRequest->mOriginalTestPatternData));
+ if (testPatternDataEntry.count >= 4) {
+ memcpy(newRequest->mOriginalTestPatternData, testPatternDataEntry.data.i32,
+ sizeof(CaptureRequest::mOriginalTestPatternData));
} else {
newRequest->mOriginalTestPatternData[0] = 0;
newRequest->mOriginalTestPatternData[1] = 0;
@@ -4143,7 +4156,8 @@
Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
sp<StatusTracker> statusTracker,
sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
- bool useHalBufManager) :
+ bool useHalBufManager,
+ bool supportCameraMute) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
@@ -4169,7 +4183,8 @@
mRequestLatency(kRequestLatencyBinSize),
mSessionParamKeys(sessionParamKeys),
mLatestSessionParams(sessionParamKeys.size()),
- mUseHalBufManager(useHalBufManager) {
+ mUseHalBufManager(useHalBufManager),
+ mSupportCameraMute(supportCameraMute){
mStatusId = statusTracker->addComponent("RequestThread");
}
@@ -4815,12 +4830,33 @@
}
{
- // Correct metadata regions for distortion correction if enabled
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
List<PhysicalCameraSettings>::iterator it;
for (it = captureRequest->mSettingsList.begin();
it != captureRequest->mSettingsList.end(); it++) {
+ if (parent->mUHRCropAndMeteringRegionMappers.find(it->cameraId) ==
+ parent->mUHRCropAndMeteringRegionMappers.end()) {
+ continue;
+ }
+
+ if (!captureRequest->mUHRCropAndMeteringRegionsUpdated) {
+ res = parent->mUHRCropAndMeteringRegionMappers[it->cameraId].
+ updateCaptureRequest(&(it->metadata));
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to correct capture requests "
+ "for scaler crop region and metering regions for request "
+ "%d: %s (%d)", halRequest->frame_number, strerror(-res),
+ res);
+ return INVALID_OPERATION;
+ }
+ captureRequest->mUHRCropAndMeteringRegionsUpdated = true;
+ }
+ }
+
+ // Correct metadata regions for distortion correction if enabled
+ for (it = captureRequest->mSettingsList.begin();
+ it != captureRequest->mSettingsList.end(); it++) {
if (parent->mDistortionMappers.find(it->cameraId) ==
parent->mDistortionMappers.end()) {
continue;
@@ -5829,6 +5865,8 @@
const sp<CaptureRequest> &request) {
ATRACE_CALL();
+ if (!mSupportCameraMute) return false;
+
Mutex::Autolock l(mTriggerMutex);
bool changed = false;
@@ -5864,16 +5902,16 @@
}
auto testPatternColor = metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
- if (testPatternColor.count > 0) {
+ if (testPatternColor.count >= 4) {
for (size_t i = 0; i < 4; i++) {
- if (testPatternColor.data.i32[i] != (int32_t)testPatternData[i]) {
+ if (testPatternColor.data.i32[i] != testPatternData[i]) {
testPatternColor.data.i32[i] = testPatternData[i];
changed = true;
}
}
} else {
metadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA,
- (int32_t*)testPatternData, 4);
+ testPatternData, 4);
changed = true;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 53a696f..39714f0 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -50,6 +50,7 @@
#include "device3/DistortionMapper.h"
#include "device3/ZoomRatioMapper.h"
#include "device3/RotateAndCropMapper.h"
+#include "device3/UHRCropAndMeteringRegionMapper.h"
#include "device3/InFlightRequest.h"
#include "device3/Camera3OutputInterface.h"
#include "device3/Camera3OfflineSession.h"
@@ -589,6 +590,9 @@
bool mRotationAndCropUpdated = false;
// Whether this capture request's zoom ratio update has been done.
bool mZoomRatioUpdated = false;
+ // Whether this max resolution capture request's crop / metering region update has been
+ // done.
+ bool mUHRCropAndMeteringRegionsUpdated = false;
};
typedef List<sp<CaptureRequest> > RequestList;
@@ -809,7 +813,8 @@
sp<camera3::StatusTracker> statusTracker,
sp<HalInterface> interface,
const Vector<int32_t>& sessionParamKeys,
- bool useHalBufManager);
+ bool useHalBufManager,
+ bool supportCameraMute);
~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -1082,6 +1087,7 @@
std::map<int32_t, std::set<String8>> mGroupIdPhysicalCameraMap;
const bool mUseHalBufManager;
+ const bool mSupportCameraMute;
};
sp<RequestThread> mRequestThread;
@@ -1224,6 +1230,12 @@
std::unordered_map<std::string, camera3::ZoomRatioMapper> mZoomRatioMappers;
/**
+ * UHR request crop / metering region mapper support
+ */
+ std::unordered_map<std::string, camera3::UHRCropAndMeteringRegionMapper>
+ mUHRCropAndMeteringRegionMappers;
+
+ /**
* RotateAndCrop mapper support
*/
std::unordered_map<std::string, camera3::RotateAndCropMapper> mRotateAndCropMappers;
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
new file mode 100644
index 0000000..c558d91
--- /dev/null
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.cpp
@@ -0,0 +1,168 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-UHRCropAndMeteringRegionMapper"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <algorithm>
+#include <cmath>
+
+#include "device3/UHRCropAndMeteringRegionMapper.h"
+#include "utils/SessionConfigurationUtils.h"
+
+namespace android {
+
+namespace camera3 {
+// For Capture request
+// metering region -> {fwk private key for metering region set, true}
+static std::unordered_map<uint32_t, std::pair<uint32_t, uint32_t>> kMeteringRegionsToCorrect = {
+ {ANDROID_CONTROL_AF_REGIONS,
+ {ANDROID_CONTROL_AF_REGIONS_SET, ANDROID_CONTROL_AF_REGIONS_SET_TRUE}},
+ {ANDROID_CONTROL_AE_REGIONS,
+ {ANDROID_CONTROL_AE_REGIONS_SET, ANDROID_CONTROL_AE_REGIONS_SET_TRUE}},
+ {ANDROID_CONTROL_AWB_REGIONS,
+ {ANDROID_CONTROL_AWB_REGIONS_SET, ANDROID_CONTROL_AWB_REGIONS_SET_TRUE}}
+};
+
+UHRCropAndMeteringRegionMapper::UHRCropAndMeteringRegionMapper(const CameraMetadata &deviceInfo,
+ bool usePreCorrectedArray) {
+
+ if (usePreCorrectedArray) {
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(&deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &mArrayWidth,
+ &mArrayHeight)) {
+ ALOGE("%s: Couldn't get pre correction active array size", __FUNCTION__);
+ return;
+ }
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(&deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ &mArrayWidthMaximumResolution, &mArrayHeightMaximumResolution)) {
+ ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
+ __FUNCTION__);
+ return;
+ }
+ } else {
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(&deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &mArrayWidth,
+ &mArrayHeight)) {
+ ALOGE("%s: Couldn't get active array size", __FUNCTION__);
+ return;
+ }
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(&deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ &mArrayWidthMaximumResolution, &mArrayHeightMaximumResolution)) {
+ ALOGE("%s: Couldn't get maximum resolution active array size", __FUNCTION__);
+ return;
+ }
+
+ }
+
+ mIsValid = true;
+
+    ALOGV("%s: array size: %d x %d, full res array size: %d x %d",
+ __FUNCTION__, mArrayWidth, mArrayHeight, mArrayWidthMaximumResolution,
+ mArrayHeightMaximumResolution);
+}
+
+void UHRCropAndMeteringRegionMapper::fixMeteringRegionsIfNeeded(CameraMetadata *request) {
+ if (request == nullptr) {
+        ALOGE("%s request is nullptr, can't fix metering regions", __FUNCTION__);
+ return;
+ }
+ for (const auto &entry : kMeteringRegionsToCorrect) {
+        // If the metering-region "set" key is set to TRUE, the client supplied
+        // the metering regions itself and they don't need to be corrected.
+ camera_metadata_entry meteringRegionsSetEntry =
+ request->find(entry.second.first);
+ if (meteringRegionsSetEntry.count == 1 &&
+ meteringRegionsSetEntry.data.u8[0] == entry.second.second) {
+ // metering region set by client, doesn't need to be fixed.
+ continue;
+ }
+ camera_metadata_entry meteringRegionEntry = request->find(entry.first);
+ if (meteringRegionEntry.count % 5 != 0) {
+ ALOGE("%s: Metering region entry for tag %d does not have a valid number of entries, "
+ "skipping", __FUNCTION__, (int)entry.first);
+ continue;
+ }
+ for (size_t j = 0; j < meteringRegionEntry.count; j += 5) {
+ int32_t *meteringRegionStart = meteringRegionEntry.data.i32 + j;
+ meteringRegionStart[0] = 0;
+ meteringRegionStart[1] = 0;
+ meteringRegionStart[2] = mArrayWidthMaximumResolution;
+ meteringRegionStart[3] = mArrayHeightMaximumResolution;
+ }
+ }
+}
+
+void UHRCropAndMeteringRegionMapper::fixCropRegionsIfNeeded(CameraMetadata *request) {
+ if (request == nullptr) {
+ ALOGE("%s request is nullptr, can't fix crop region", __FUNCTION__);
+ return;
+ }
+    // If the scalerCropRegionSet key is set to TRUE, the client supplied
+    // the crop region itself and it doesn't need to be corrected.
+ camera_metadata_entry cropRegionSetEntry =
+ request->find(ANDROID_SCALER_CROP_REGION_SET);
+ if (cropRegionSetEntry.count == 1 &&
+ cropRegionSetEntry.data.u8[0] == ANDROID_SCALER_CROP_REGION_SET_TRUE) {
+ // crop regions set by client, doesn't need to be fixed.
+ return;
+ }
+ camera_metadata_entry_t cropRegionEntry = request->find(ANDROID_SCALER_CROP_REGION);
+ if (cropRegionEntry.count == 4) {
+ cropRegionEntry.data.i32[0] = 0;
+ cropRegionEntry.data.i32[1] = 0;
+ cropRegionEntry.data.i32[2] = mArrayWidthMaximumResolution;
+ cropRegionEntry.data.i32[3] = mArrayHeightMaximumResolution;
+ }
+}
+
+status_t UHRCropAndMeteringRegionMapper::updateCaptureRequest(CameraMetadata* request) {
+ if (request == nullptr) {
+ ALOGE("%s Invalid request, request is nullptr", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (!mIsValid) {
+ ALOGE("%s UHRCropAndMeteringRegionMapper didn't initialize correctly", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ camera_metadata_entry sensorPixelModeEntry = request->find(ANDROID_SENSOR_PIXEL_MODE);
+
+ // Check if this is max resolution capture, if not, we don't need to do
+ // anything.
+ if (sensorPixelModeEntry.count != 0) {
+ int32_t sensorPixelMode = sensorPixelModeEntry.data.u8[0];
+ if (sensorPixelMode != ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) {
+ // Correction not needed for default mode requests.
+ return OK;
+ }
+ } else {
+ // sensor pixel mode not set -> default sensor pixel mode request, which
+ // doesn't need correction.
+ return OK;
+ }
+
+ fixCropRegionsIfNeeded(request);
+ fixMeteringRegionsIfNeeded(request);
+ return OK;
+}
+
+} // namespace camera3
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.h b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.h
new file mode 100644
index 0000000..a026e6d
--- /dev/null
+++ b/services/camera/libcameraservice/device3/UHRCropAndMeteringRegionMapper.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_UHRCROP_REGIONS_MAPPER_H
+#define ANDROID_SERVERS_UHRCROP_REGIONS_MAPPER_H
+
+#include <utils/Errors.h>
+#include <array>
+
+#include "camera/CameraMetadata.h"
+
+namespace android {
+
+namespace camera3 {
+
+/**
+ * Utilities to transform SCALER_CROP_REGION and metering regions for ultra high
+ * resolution sensors.
+ */
+class UHRCropAndMeteringRegionMapper {
+ public:
+ UHRCropAndMeteringRegionMapper() = default;
+    UHRCropAndMeteringRegionMapper(const CameraMetadata &deviceInfo, bool usePreCorrectedArray);
+
+ /**
+     * Adjust capture request's crop and metering regions for ultra high resolution sensors
+ */
+ status_t updateCaptureRequest(CameraMetadata *request);
+
+ private:
+
+ void fixCropRegionsIfNeeded(CameraMetadata *request);
+ void fixMeteringRegionsIfNeeded(CameraMetadata *request);
+
+ int32_t mArrayWidth = 0;
+ int32_t mArrayHeight = 0;
+ int32_t mArrayWidthMaximumResolution = 0;
+ int32_t mArrayHeightMaximumResolution = 0;
+ bool mIsValid = false;
+}; // class UHRCropAndMeteringRegionMapper
+
+} // namespace camera3
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 1a39510..7ec0956 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -129,20 +129,6 @@
return OK;
}
-static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
- int32_t arrayTag, int32_t *width, int32_t *height) {
- if (width == nullptr || height == nullptr) {
- ALOGE("%s: width / height nullptr", __FUNCTION__);
- return false;
- }
- camera_metadata_ro_entry_t entry;
- entry = deviceInfo->find(arrayTag);
- if (entry.count != 4) return false;
- *width = entry.data.i32[2];
- *height = entry.data.i32[3];
- return true;
-}
-
ZoomRatioMapper::ZoomRatioMapper(const CameraMetadata* deviceInfo,
bool supportNativeZoomRatio, bool usePrecorrectArray) {
initRemappedKeys();
@@ -156,13 +142,13 @@
int32_t activeMaximumResolutionW = 0;
int32_t activeMaximumResolutionH = 0;
- if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
- &arrayW, &arrayH)) {
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, &arrayW, &arrayH)) {
ALOGE("%s: Couldn't get pre correction active array size", __FUNCTION__);
return;
}
- if (!getArrayWidthAndHeight(deviceInfo, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
- &activeW, &activeH)) {
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &activeW, &activeH)) {
ALOGE("%s: Couldn't get active array size", __FUNCTION__);
return;
}
@@ -170,14 +156,14 @@
bool isUltraHighResolutionSensor =
camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(*deviceInfo);
if (isUltraHighResolutionSensor) {
- if (!getArrayWidthAndHeight(deviceInfo,
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
&arrayMaximumResolutionW, &arrayMaximumResolutionH)) {
ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
__FUNCTION__);
return;
}
- if (!getArrayWidthAndHeight(deviceInfo,
+ if (!SessionConfigurationUtils::getArrayWidthAndHeight(deviceInfo,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
&activeMaximumResolutionW, &activeMaximumResolutionH)) {
ALOGE("%s: Couldn't get maximum resolution pre correction active array size",
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 454c05f..a239c81 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -142,19 +142,19 @@
case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS:
return ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS:
- return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS;
+ return ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS:
return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS:
return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS:
- return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS;
+ return ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
return ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
- return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS;
+ return ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION;
case ANDROID_SENSOR_OPAQUE_RAW_SIZE:
return ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION;
case ANDROID_LENS_INTRINSIC_CALIBRATION:
@@ -169,6 +169,19 @@
return -1;
}
+bool SessionConfigurationUtils::getArrayWidthAndHeight(const CameraMetadata *deviceInfo,
+ int32_t arrayTag, int32_t *width, int32_t *height) {
+ if (width == nullptr || height == nullptr) {
+ ALOGE("%s: width / height nullptr", __FUNCTION__);
+ return false;
+ }
+ camera_metadata_ro_entry_t entry;
+ entry = deviceInfo->find(arrayTag);
+ if (entry.count != 4) return false;
+ *width = entry.data.i32[2];
+ *height = entry.data.i32[3];
+ return true;
+}
StreamConfigurationPair
SessionConfigurationUtils::getStreamConfigurationPair(const CameraMetadata &staticInfo) {
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 1fbaa69..192e241 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -85,6 +85,9 @@
android_dataspace dataSpace, const CameraMetadata& info, bool maxResolution,
/*out*/int32_t* outWidth, /*out*/int32_t* outHeight);
+ static bool getArrayWidthAndHeight(const CameraMetadata *deviceInfo, int32_t arrayTag,
+ int32_t *width, int32_t *height);
+
//check if format is not custom format
static bool isPublicFormat(int32_t format);