Merge "media: Move video buffer timestamp adjustment from CodecSource to GraphicBufferSource." into nyc-mr1-dev
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 8a305de..c4e5df7 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -58,6 +58,7 @@
mDisconnectReplyID(0),
mBuffering(false),
mInPreparationPhase(true),
+ mEOSPending(false),
mSeekGeneration(0),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0) {
@@ -200,34 +201,28 @@
status_t finalResult;
if (!source->hasBufferAvailable(&finalResult)) {
if (finalResult == OK) {
- int64_t mediaDurationUs = 0;
- getDuration(&mediaDurationUs);
- sp<AnotherPacketSource> otherSource = getSource(!audio);
- status_t otherFinalResult;
- // If other source already signaled EOS, this source should also signal EOS
- if (otherSource != NULL &&
- !otherSource->hasBufferAvailable(&otherFinalResult) &&
- otherFinalResult == ERROR_END_OF_STREAM) {
- source->signalEOS(ERROR_END_OF_STREAM);
+            // If the other source has already signaled EOS, this source should also return EOS
+ if (sourceReachedEOS(!audio)) {
return ERROR_END_OF_STREAM;
}
// If this source has detected near end, give it some time to retrieve more
- // data before signaling EOS
+ // data before returning EOS
+ int64_t mediaDurationUs = 0;
+ getDuration(&mediaDurationUs);
if (source->isFinished(mediaDurationUs)) {
int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
if (eosTimeout == 0) {
setEOSTimeout(audio, ALooper::GetNowUs());
} else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
setEOSTimeout(audio, 0);
- source->signalEOS(ERROR_END_OF_STREAM);
return ERROR_END_OF_STREAM;
}
return -EWOULDBLOCK;
}
- if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+ if (!sourceNearEOS(!audio)) {
// We should not enter buffering mode
// if any of the sources already have detected EOS.
startBufferingIfNecessary();
@@ -306,6 +301,7 @@
mState = SEEKING;
mHandler->seek(seekTimeUs);
+ mEOSPending = false;
}
void NuPlayer::RTSPSource::schedulePollBuffering() {
@@ -314,10 +310,10 @@
}
void NuPlayer::RTSPSource::checkBuffering(
- bool *prepared, bool *underflow, bool *overflow, bool *startServer) {
+ bool *prepared, bool *underflow, bool *overflow, bool *startServer, bool *finished) {
size_t numTracks = mTracks.size();
- size_t preparedCount, underflowCount, overflowCount, startCount;
- preparedCount = underflowCount = overflowCount = startCount = 0;
+ size_t preparedCount, underflowCount, overflowCount, startCount, finishedCount;
+ preparedCount = underflowCount = overflowCount = startCount = finishedCount = 0;
size_t count = numTracks;
for (size_t i = 0; i < count; ++i) {
@@ -337,6 +333,7 @@
if (src->isFinished(/* duration */ 0)) {
++overflowCount;
+ ++finishedCount;
} else {
if (bufferedDurationUs < kUnderflowMarkUs) {
++underflowCount;
@@ -354,11 +351,12 @@
*underflow = (underflowCount > 0);
*overflow = (overflowCount == numTracks);
*startServer = (startCount > 0);
+ *finished = (finishedCount > 0);
}
void NuPlayer::RTSPSource::onPollBuffering() {
- bool prepared, underflow, overflow, startServer;
- checkBuffering(&prepared, &underflow, &overflow, &startServer);
+ bool prepared, underflow, overflow, startServer, finished;
+ checkBuffering(&prepared, &underflow, &overflow, &startServer, &finished);
if (prepared && mInPreparationPhase) {
mInPreparationPhase = false;
@@ -369,8 +367,11 @@
startBufferingIfNecessary();
}
- if (overflow && mHandler != NULL) {
+ if (haveSufficientDataOnAllTracks()) {
stopBufferingIfNecessary();
+ }
+
+ if (overflow && mHandler != NULL) {
mHandler->pause();
}
@@ -378,9 +379,72 @@
mHandler->resume();
}
+ if (finished && mHandler != NULL) {
+ mHandler->cancelAccessUnitTimeoutCheck();
+ }
+
schedulePollBuffering();
}
+void NuPlayer::RTSPSource::signalSourceEOS(status_t result) {
+ const bool audio = true;
+ const bool video = false;
+
+ sp<AnotherPacketSource> source = getSource(audio);
+ if (source != NULL) {
+ source->signalEOS(result);
+ }
+
+ source = getSource(video);
+ if (source != NULL) {
+ source->signalEOS(result);
+ }
+}
+
+bool NuPlayer::RTSPSource::sourceReachedEOS(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
+ status_t finalResult;
+ return (source != NULL &&
+ !source->hasBufferAvailable(&finalResult) &&
+ finalResult == ERROR_END_OF_STREAM);
+}
+
+bool NuPlayer::RTSPSource::sourceNearEOS(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
+ int64_t mediaDurationUs = 0;
+ getDuration(&mediaDurationUs);
+ return (source != NULL && source->isFinished(mediaDurationUs));
+}
+
+void NuPlayer::RTSPSource::onSignalEOS(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mSeekGeneration) {
+ return;
+ }
+
+ if (mEOSPending) {
+ signalSourceEOS(ERROR_END_OF_STREAM);
+ mEOSPending = false;
+ }
+}
+
+void NuPlayer::RTSPSource::postSourceEOSIfNecessary() {
+ const bool audio = true;
+ const bool video = false;
+    // If a source has detected that it is near the end of the stream, give it
+    // some time to retrieve more data before signaling EOS
+ if (sourceNearEOS(audio) || sourceNearEOS(video)) {
+ if (!mEOSPending) {
+ sp<AMessage> msg = new AMessage(kWhatSignalEOS, this);
+ msg->setInt32("generation", mSeekGeneration);
+ msg->post(kNearEOSTimeoutUs);
+ mEOSPending = true;
+ }
+ }
+}
+
void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
if (msg->what() == kWhatDisconnect) {
sp<AReplyToken> replyID;
@@ -408,6 +472,9 @@
} else if (msg->what() == kWhatPollBuffering) {
onPollBuffering();
return;
+ } else if (msg->what() == kWhatSignalEOS) {
+ onSignalEOS(msg);
+ return;
}
CHECK_EQ(msg->what(), (int)kWhatNotify);
@@ -517,16 +584,10 @@
}
if (err != OK) {
- sp<AnotherPacketSource> source = getSource(false /* audio */);
- if (source != NULL) {
- source->signalEOS(err);
- }
-
- source = getSource(true /* audio */);
- if (source != NULL) {
- source->signalEOS(err);
- }
+ signalSourceEOS(err);
}
+
+ postSourceEOSIfNecessary();
break;
}
@@ -554,6 +615,7 @@
source->queueAccessUnit(accessUnit);
}
+ postSourceEOSIfNecessary();
break;
}
@@ -564,17 +626,7 @@
CHECK_NE(finalResult, (status_t)OK);
if (mTSParser != NULL) {
- sp<AnotherPacketSource> source = getSource(false /* audio */);
- if (source != NULL) {
- source->signalEOS(finalResult);
- }
-
- source = getSource(true /* audio */);
- if (source != NULL) {
- source->signalEOS(finalResult);
- }
-
- return;
+ signalSourceEOS(finalResult);
}
size_t trackIndex;
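
The RTSPSource.cpp hunks above replace the direct signalEOS() calls with a deferred, seek-aware path: postSourceEOSIfNecessary() posts a kWhatSignalEOS message tagged with the current mSeekGeneration and delayed by kNearEOSTimeoutUs, and onSignalEOS() only signals EOS if that generation still matches and mEOSPending is still set (performSeek() clears the flag). The standalone C++ sketch below illustrates that generation-guarded deferral; it is only an approximation, with a detached thread standing in for AMessage/ALooper, and the names DeferredEos, kNearEosTimeout and signalEos are illustrative rather than taken from the patch.

// Standalone approximation of the generation-guarded deferred EOS above.
// A detached delayed callback replaces AMessage/ALooper; DeferredEos,
// kNearEosTimeout and signalEos are illustrative names, not from the patch.
#include <atomic>
#include <chrono>
#include <cstdint>
#include <functional>
#include <thread>

class DeferredEos {
public:
    // Called when a source looks finished: arm at most one pending EOS,
    // tagged with the seek generation that was current at posting time.
    void postIfNecessary(std::function<void()> signalEos) {
        if (eosPending_.exchange(true)) {
            return;  // an EOS is already pending
        }
        const int32_t generation = seekGeneration_.load();
        std::thread([this, generation, signalEos] {
            std::this_thread::sleep_for(kNearEosTimeout);
            // Counterpart of onSignalEOS(): drop the event if a seek has
            // happened (generation changed) or the pending flag was cleared.
            if (generation != seekGeneration_.load() || !eosPending_.load()) {
                return;
            }
            eosPending_ = false;
            signalEos();
        }).detach();
    }

    // On seek: bump the generation and clear the flag, so a pending EOS
    // becomes stale and is dropped when its callback fires.
    void onSeek() {
        ++seekGeneration_;
        eosPending_ = false;
    }

private:
    static constexpr std::chrono::seconds kNearEosTimeout{2};
    std::atomic<int32_t> seekGeneration_{0};
    std::atomic<bool> eosPending_{false};
};

The real code lets the looper deliver the message; the generation counter is what makes cancellation cheap, since a stale message is simply ignored rather than removed from the queue.
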
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index a6a7644..c7834ef 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -64,6 +64,7 @@
kWhatDisconnect = 'disc',
kWhatPerformSeek = 'seek',
kWhatPollBuffering = 'poll',
+ kWhatSignalEOS = 'eos ',
};
enum State {
@@ -106,6 +107,7 @@
Mutex mBufferingLock;
bool mBuffering;
bool mInPreparationPhase;
+ bool mEOSPending;
sp<ALooper> mLooper;
sp<MyHandler> mHandler;
@@ -133,7 +135,12 @@
void performSeek(int64_t seekTimeUs);
void schedulePollBuffering();
- void checkBuffering(bool *prepared, bool *underflow, bool *overflow, bool *startServer);
+ void checkBuffering(
+ bool *prepared,
+ bool *underflow,
+ bool *overflow,
+ bool *startServer,
+ bool *finished);
void onPollBuffering();
bool haveSufficientDataOnAllTracks();
@@ -144,6 +151,13 @@
bool stopBufferingIfNecessary();
void finishSeek(status_t err);
+ void postSourceEOSIfNecessary();
+ void signalSourceEOS(status_t result);
+ void onSignalEOS(const sp<AMessage> &msg);
+
+ bool sourceNearEOS(bool audio);
+ bool sourceReachedEOS(bool audio);
+
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index b111c28..be5067d 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -158,11 +158,14 @@
// TODO: Use Flexible color instead
videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
- // For the thumbnail extraction case, try to allocate single buffer
- // in both input and output ports. NOTE: This request may fail if
- // component requires more than that for decoding.
- videoFormat->setInt32("android._num-input-buffers", 1);
- videoFormat->setInt32("android._num-output-buffers", 1);
+    // For the thumbnail extraction case, try to allocate a single buffer in
+    // both the input and output ports when seeking to a sync frame. NOTE: this
+    // request may fail if the component requires more than that for decoding.
+ bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
+ if (!isSeekingClosest) {
+ videoFormat->setInt32("android._num-input-buffers", 1);
+ videoFormat->setInt32("android._num-output-buffers", 1);
+ }
status_t err;
sp<ALooper> looper = new ALooper;
@@ -254,7 +257,6 @@
bool isAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
|| !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
- bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
bool firstSample = true;
int64_t targetTimeUs = -1ll;
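
The StagefrightMetadataRetriever.cpp change requests the single input/output buffer allocation only when the thumbnail seek is not SEEK_CLOSEST, because seeking to the closest frame requires decoding every frame from the preceding sync frame up to the target. A minimal sketch of that branching follows, with a std::map standing in for the AMessage-based videoFormat and an illustrative SeekMode enum; neither is the real API.

// Minimal sketch of the conditional buffer-count request; std::map and the
// SeekMode enum stand in for AMessage and MediaSource::ReadOptions.
#include <cstdint>
#include <map>
#include <string>

enum class SeekMode { kPreviousSync, kClosestSync, kClosest };

using VideoFormat = std::map<std::string, int32_t>;

void configureThumbnailBuffers(VideoFormat &videoFormat, SeekMode seekMode) {
    // SEEK_CLOSEST must decode every frame from the preceding sync frame up
    // to the requested time, so one input/output buffer is not enough.
    const bool isSeekingClosest = (seekMode == SeekMode::kClosest);
    if (!isSeekingClosest) {
        // Single sync-frame thumbnail: ask for the minimal allocation.
        // The component may still reject this and use more buffers.
        videoFormat["android._num-input-buffers"] = 1;
        videoFormat["android._num-output-buffers"] = 1;
    }
}
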
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 42a1182..845131a 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1408,6 +1408,11 @@
msg->post((mKeepAliveTimeoutUs * 9) / 10);
}
+ void cancelAccessUnitTimeoutCheck() {
+ ALOGV("cancelAccessUnitTimeoutCheck");
+ ++mCheckGeneration;
+ }
+
void postAccessUnitTimeoutCheck() {
if (mCheckPending) {
return;
@@ -1792,14 +1797,8 @@
        // Time is now established, let's start timestamping immediately
for (size_t i = 0; i < mTracks.size(); ++i) {
- TrackInfo *trackInfo = &mTracks.editItemAt(i);
- while (!trackInfo->mPackets.empty()) {
- sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
- trackInfo->mPackets.erase(trackInfo->mPackets.begin());
-
- if (addMediaTimestamp(i, trackInfo, accessUnit)) {
- postQueueAccessUnit(i, accessUnit);
- }
+ if (OK != processAccessUnitQueue(i)) {
+ return;
}
}
for (size_t i = 0; i < mTracks.size(); ++i) {
@@ -1812,26 +1811,8 @@
}
}
- void onAccessUnitComplete(
- int32_t trackIndex, const sp<ABuffer> &accessUnit) {
- ALOGV("onAccessUnitComplete track %d", trackIndex);
-
+ status_t processAccessUnitQueue(int32_t trackIndex) {
TrackInfo *track = &mTracks.editItemAt(trackIndex);
- if(!mPlayResponseParsed){
- uint32_t seqNum = (uint32_t)accessUnit->int32Data();
- ALOGI("play response is not parsed, storing accessunit %u", seqNum);
- track->mPackets.push_back(accessUnit);
- return;
- }
-
- handleFirstAccessUnit();
-
- if (!mAllTracksHaveTime) {
- ALOGV("storing accessUnit, no time established yet");
- track->mPackets.push_back(accessUnit);
- return;
- }
-
while (!track->mPackets.empty()) {
sp<ABuffer> accessUnit = *track->mPackets.begin();
track->mPackets.erase(track->mPackets.begin());
@@ -1862,7 +1843,7 @@
"Still no first rtp packet after %d stale ones",
kMaxAllowedStaleAccessUnits);
track->mAllowedStaleAccessUnits = -1;
- return;
+ return UNKNOWN_ERROR;
}
// Now found the first rtp packet of the stream after seeking.
@@ -1876,14 +1857,35 @@
continue;
}
-
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
postQueueAccessUnit(trackIndex, accessUnit);
}
}
+ return OK;
+ }
- if (addMediaTimestamp(trackIndex, track, accessUnit)) {
- postQueueAccessUnit(trackIndex, accessUnit);
+ void onAccessUnitComplete(
+ int32_t trackIndex, const sp<ABuffer> &accessUnit) {
+ TrackInfo *track = &mTracks.editItemAt(trackIndex);
+ track->mPackets.push_back(accessUnit);
+
+ uint32_t seqNum = (uint32_t)accessUnit->int32Data();
+ ALOGV("onAccessUnitComplete track %d storing accessunit %u", trackIndex, seqNum);
+
+        if (!mPlayResponseParsed) {
+ ALOGV("play response is not parsed");
+ return;
+ }
+
+ handleFirstAccessUnit();
+
+ if (!mAllTracksHaveTime) {
+ ALOGV("storing accessUnit, no time established yet");
+ return;
+ }
+
+ if (OK != processAccessUnitQueue(trackIndex)) {
+ return;
}
if (track->mEOSReceived) {
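
In MyHandler.h, the draining loop is factored out into processAccessUnitQueue(), and onAccessUnitComplete() now unconditionally queues the completed access unit and only drains once the PLAY response has been parsed and all tracks have established a time base; cancelAccessUnitTimeoutCheck() bumps mCheckGeneration so a pending timeout check is ignored. The sketch below shows only the queue-then-drain shape of that refactor; Packet, Track, deliver() and the bool parameters are simplified stand-ins for the patch's TrackInfo state, not its actual types.

// Sketch of the queue-then-drain shape of the refactor; Packet, Track and
// deliver() are simplified stand-ins for the patch's TrackInfo machinery.
#include <cstdint>
#include <deque>

struct Packet {
    uint32_t seqNum;  // RTP sequence number, kept for logging only
};

class Track {
public:
    // Mirrors the new onAccessUnitComplete(): always queue first, then
    // drain only once both preconditions hold.
    void onAccessUnitComplete(const Packet &packet, bool playResponseParsed,
                              bool allTracksHaveTime) {
        packets_.push_back(packet);
        if (!playResponseParsed || !allTracksHaveTime) {
            return;  // keep buffering until the session is ready
        }
        processQueue();
    }

    // Mirrors processAccessUnitQueue(): forward everything queued so far,
    // in arrival order.
    void processQueue() {
        while (!packets_.empty()) {
            Packet packet = packets_.front();
            packets_.pop_front();
            deliver(packet);
        }
    }

private:
    void deliver(const Packet & /*packet*/) {
        // the real code timestamps the unit and posts it downstream
    }

    std::deque<Packet> packets_;
};
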
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index b752541..1ddfb4d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1321,9 +1321,11 @@
desc->isActive() &&
outputDesc->sharesHwModuleWith(desc) &&
(newDevice != desc->device())) {
+ audio_devices_t newDevice2 = getNewOutputDevice(desc, false /*fromCache*/);
+ bool force = desc->device() != newDevice2;
setOutputDevice(desc,
- getNewOutputDevice(desc, false /*fromCache*/),
- true,
+ newDevice2,
+ force,
outputDesc->latency()*2);
}
}
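
The AudioPolicyManager.cpp hunk computes the new output device once and passes force=true to setOutputDevice() only when that device differs from the descriptor's current one, instead of always forcing. A rough sketch of that guard follows, using placeholder types and stubbed helpers; queryNewOutputDevice and applyOutputDevice below are illustrative, not the real AudioPolicyManager methods.

// Rough sketch of the re-route guard; the types and the two helpers are
// placeholders, not the real AudioPolicyManager interfaces.
#include <cstdint>

using audio_devices_t = uint32_t;

struct OutputDesc {
    audio_devices_t device = 0;
    uint32_t latencyMs = 0;
};

// Placeholder for getNewOutputDevice(desc, false /*fromCache*/).
audio_devices_t queryNewOutputDevice(const OutputDesc &desc) {
    return desc.device;  // stub
}

// Placeholder for setOutputDevice(desc, device, force, delayMs).
void applyOutputDevice(OutputDesc &desc, audio_devices_t device, bool force,
                       uint32_t delayMs) {
    (void)force;
    (void)delayMs;
    desc.device = device;
}

void rerouteIfNeeded(OutputDesc &desc) {
    const audio_devices_t newDevice = queryNewOutputDevice(desc);
    // Force only when the routing actually changes, so an unchanged device
    // does not trigger redundant routing work on this output.
    const bool force = (desc.device != newDevice);
    applyOutputDevice(desc, newDevice, force, desc.latencyMs * 2);
}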