media: Support accurate pause/resume/stop in GraphicBufferSource.
Process all buffers that have been queued to the encoder on
StagefrightRecorder::pause()/stop(). Furthermore, add timestamp support for
pause/stop. Buffers will keep getting encoded until 1) a buffer with a higher
timestamp is queued, or 2) the bufferqueue is abandoned by the producer.
Test: Recording with hacked GoogleCamera timestamps, and MediaRecorder CTS tests.
Bug: 32765698
Change-Id: I9ec33d635aef269836d7a5d9f4b906cb41b46a0d
diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h
index f9a46a9..5e99b78 100644
--- a/include/media/stagefright/MediaCodecSource.h
+++ b/include/media/stagefright/MediaCodecSource.h
@@ -54,7 +54,7 @@
// MediaSource
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
- virtual status_t pause();
+ virtual status_t pause(MetaData *params = NULL);
virtual sp<MetaData> getFormat();
virtual status_t read(
MediaBuffer **buffer,
@@ -66,6 +66,12 @@
// for AHandlerReflector
void onMessageReceived(const sp<AMessage> &msg);
+ // Set GraphicBufferSource stop time. GraphicBufferSource will stop
+ // after receiving a buffer with timestamp larger than or equal to stopTimeUs.
+ // All the buffers with timestamp larger than or equal to stopTimeUs will be
+ // discarded. stopTimeUs uses SYSTEM_TIME_MONOTONIC time base.
+ status_t setStopStimeUs(int64_t stopTimeUs);
+
protected:
virtual ~MediaCodecSource();
@@ -79,6 +85,7 @@
kWhatStop,
kWhatPause,
kWhatSetInputBufferTimeOffset,
+ kWhatSetStopTimeOffset,
kWhatGetFirstSampleSystemTimeUs,
kWhatStopStalled,
};
@@ -91,13 +98,23 @@
uint32_t flags = 0);
status_t onStart(MetaData *params);
- void onPause();
+
+ // Pause the source at pauseStartTimeUs. For non-surface input,
+ // buffers will be dropped immediately. For surface input, buffers
+ // with timestamp smaller than pauseStartTimeUs will still be encoded.
+ // Buffers with timestamp larger than or equal to pauseStartTimeUs will be
+ // dropped. pauseStartTimeUs uses SYSTEM_TIME_MONOTONIC time base.
+ void onPause(int64_t pauseStartTimeUs);
+
status_t init();
status_t initEncoder();
void releaseEncoder();
status_t feedEncoderInputBuffers();
- void suspend();
- void resume(int64_t skipFramesBeforeUs = -1ll);
+ // Resume GraphicBufferSource at resumeStartTimeUs. Buffers
+ // from GraphicBufferSource with timestamp larger than or equal to
+ // resumeStartTimeUs will be encoded. resumeStartTimeUs uses
+ // SYSTEM_TIME_MONOTONIC time base.
+ void resume(int64_t resumeStartTimeUs = -1ll);
void signalEOS(status_t err = ERROR_END_OF_STREAM);
bool reachedEOS();
status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
diff --git a/media/libmedia/aidl/android/IGraphicBufferSource.aidl b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
index a8dd309..325c631 100644
--- a/media/libmedia/aidl/android/IGraphicBufferSource.aidl
+++ b/media/libmedia/aidl/android/IGraphicBufferSource.aidl
@@ -25,11 +25,12 @@
*/
interface IGraphicBufferSource {
void configure(IOMXNode omxNode, int dataSpace);
- void setSuspend(boolean suspend);
+ void setSuspend(boolean suspend, long suspendTimeUs);
void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
void setMaxFps(float maxFps);
void setTimeLapseConfig(long timePerFrameUs, long timePerCaptureUs);
void setStartTimeUs(long startTimeUs);
+ void setStopTimeUs(long stopTimeUs);
void setColorAspects(int aspects);
void setTimeOffsetUs(long timeOffsetsUs);
void signalEndOfInputStream();
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index d00e377..170659a 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -1812,15 +1812,17 @@
return OK;
}
+ mPauseStartTimeUs = systemTime() / 1000;
+ sp<MetaData> meta = new MetaData;
+ meta->setInt64(kKeyTime, mPauseStartTimeUs);
+
if (mAudioEncoderSource != NULL) {
mAudioEncoderSource->pause();
}
if (mVideoEncoderSource != NULL) {
- mVideoEncoderSource->pause();
+ mVideoEncoderSource->pause(meta.get());
}
- mPauseStartTimeUs = systemTime() / 1000;
-
return OK;
}
@@ -1835,6 +1837,8 @@
return OK;
}
+ int64_t resumeStartTimeUs = systemTime() / 1000;
+
int64_t bufferStartTimeUs = 0;
bool allSourcesStarted = true;
for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
@@ -1855,18 +1859,20 @@
mPauseStartTimeUs = bufferStartTimeUs;
}
// 30 ms buffer to avoid timestamp overlap
- mTotalPausedDurationUs += (systemTime() / 1000) - mPauseStartTimeUs - 30000;
+ mTotalPausedDurationUs += resumeStartTimeUs - mPauseStartTimeUs - 30000;
}
double timeOffset = -mTotalPausedDurationUs;
if (mCaptureFpsEnable) {
timeOffset *= mCaptureFps / mFrameRate;
}
+ sp<MetaData> meta = new MetaData;
+ meta->setInt64(kKeyTime, resumeStartTimeUs);
for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
if (source == nullptr) {
continue;
}
source->setInputBufferTimeOffset((int64_t)timeOffset);
- source->start();
+ source->start(meta.get());
}
mPauseStartTimeUs = 0;
@@ -1883,6 +1889,12 @@
mCameraSourceTimeLapse = NULL;
}
+ if (mVideoEncoderSource != NULL) {
+ int64_t stopTimeUs = systemTime() / 1000;
+ sp<MetaData> meta = new MetaData;
+ err = mVideoEncoderSource->setStopStimeUs(stopTimeUs);
+ }
+
if (mWriter != NULL) {
err = mWriter->stop();
mWriter.clear();
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 3235e81..ebd7f89 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -6553,7 +6553,7 @@
if (mCodec->mCreateInputBuffersSuspended) {
err = statusFromBinderStatus(
- mCodec->mGraphicBufferSource->setSuspend(true));
+ mCodec->mGraphicBufferSource->setSuspend(true, -1));
if (err != OK) {
ALOGE("[%s] Unable to configure option to suspend (err %d)",
@@ -7117,8 +7117,10 @@
return INVALID_OPERATION;
}
+ int64_t suspendStartTimeUs = -1;
+ (void) params->findInt64("drop-start-time-us", &suspendStartTimeUs);
status_t err = statusFromBinderStatus(
- mGraphicBufferSource->setSuspend(dropInputFrames != 0));
+ mGraphicBufferSource->setSuspend(dropInputFrames != 0, suspendStartTimeUs));
if (err != OK) {
ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
@@ -7126,6 +7128,22 @@
}
}
+ int64_t stopTimeUs;
+ if (params->findInt64("stop-time-us", &stopTimeUs)) {
+ if (mGraphicBufferSource == NULL) {
+ ALOGE("[%s] Invalid to set stop time without surface",
+ mComponentName.c_str());
+ return INVALID_OPERATION;
+ }
+ status_t err = statusFromBinderStatus(
+ mGraphicBufferSource->setStopTimeUs(stopTimeUs));
+
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'stop-time-us' (err %d)", err);
+ return err;
+ }
+ }
+
int32_t dummy;
if (params->findInt32("request-sync", &dummy)) {
status_t err = requestIDRFrame();
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index 5981b35..059a730 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -363,8 +363,20 @@
return postSynchronouslyAndReturnError(msg);
}
-status_t MediaCodecSource::pause() {
- (new AMessage(kWhatPause, mReflector))->post();
+
+status_t MediaCodecSource::setStopStimeUs(int64_t stopTimeUs) {
+ if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+ return OK;
+ }
+ sp<AMessage> msg = new AMessage(kWhatSetStopTimeOffset, mReflector);
+ msg->setInt64("stop-time-us", stopTimeUs);
+ return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::pause(MetaData* params) {
+ sp<AMessage> msg = new AMessage(kWhatPause, mReflector);
+ msg->setObject("meta", params);
+ msg->post();
return OK;
}
@@ -624,22 +636,13 @@
}
}
-void MediaCodecSource::suspend() {
- CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
- if (mEncoder != NULL) {
- sp<AMessage> params = new AMessage;
- params->setInt32("drop-input-frames", true);
- mEncoder->setParameters(params);
- }
-}
-
-void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
+void MediaCodecSource::resume(int64_t resumeStartTimeUs) {
CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
if (mEncoder != NULL) {
sp<AMessage> params = new AMessage;
params->setInt32("drop-input-frames", false);
- if (skipFramesBeforeUs > 0) {
- params->setInt64("skip-frames-before", skipFramesBeforeUs);
+ if (resumeStartTimeUs > 0) {
+ params->setInt64("drop-start-time-us", resumeStartTimeUs);
}
mEncoder->setParameters(params);
}
@@ -661,7 +664,7 @@
mFirstSampleSystemTimeUs = systemTime() / 1000;
if (mPausePending) {
mPausePending = false;
- onPause();
+ onPause(mFirstSampleSystemTimeUs);
mbuf->release();
mAvailEncoderInputIndices.push_back(bufferIndex);
return OK;
@@ -728,6 +731,10 @@
ALOGE("Failed to start while we're stopping");
return INVALID_OPERATION;
}
+ int64_t startTimeUs;
+ if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
+ startTimeUs = -1ll;
+ }
if (mStarted) {
ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
@@ -739,7 +746,7 @@
mEncoder->requestIDRFrame();
}
if (mFlags & FLAG_USE_SURFACE_INPUT) {
- resume();
+ resume(startTimeUs);
} else {
CHECK(mPuller != NULL);
mPuller->resume();
@@ -752,11 +759,14 @@
status_t err = OK;
if (mFlags & FLAG_USE_SURFACE_INPUT) {
- int64_t startTimeUs;
- if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
- startTimeUs = -1ll;
+ if (mEncoder != NULL) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", false);
+ if (startTimeUs >= 0) {
+ params->setInt64("skip-frames-before", startTimeUs);
+ }
+ mEncoder->setParameters(params);
}
- resume(startTimeUs);
} else {
CHECK(mPuller != NULL);
sp<MetaData> meta = params;
@@ -781,9 +791,12 @@
return OK;
}
-void MediaCodecSource::onPause() {
- if (mFlags & FLAG_USE_SURFACE_INPUT) {
- suspend();
+void MediaCodecSource::onPause(int64_t pauseStartTimeUs) {
+ if ((mFlags & FLAG_USE_SURFACE_INPUT) && (mEncoder != NULL)) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", true);
+ params->setInt64("drop-start-time-us", pauseStartTimeUs);
+ mEncoder->setParameters(params);
} else {
CHECK(mPuller != NULL);
mPuller->pause();
@@ -871,7 +884,7 @@
mFirstSampleSystemTimeUs = systemTime() / 1000;
if (mPausePending) {
mPausePending = false;
- onPause();
+ onPause(mFirstSampleSystemTimeUs);
mbuf->release();
break;
}
@@ -1000,6 +1013,7 @@
ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
}
signalEOS();
+ break;
}
case kWhatPause:
@@ -1007,7 +1021,14 @@
if (mFirstSampleSystemTimeUs < 0) {
mPausePending = true;
} else {
- onPause();
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+ MetaData *params = static_cast<MetaData *>(obj.get());
+ int64_t pauseStartTimeUs = -1;
+ if (params == NULL || !params->findInt64(kKeyTime, &pauseStartTimeUs)) {
+ pauseStartTimeUs = -1ll;
+ }
+ onPause(pauseStartTimeUs);
}
break;
}
@@ -1030,6 +1051,26 @@
response->postReply(replyID);
break;
}
+ case kWhatSetStopTimeOffset:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ status_t err = OK;
+ int64_t stopTimeUs;
+ CHECK(msg->findInt64("stop-time-us", &stopTimeUs));
+
+ // Propagate the timestamp offset to GraphicBufferSource.
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ sp<AMessage> params = new AMessage;
+ params->setInt64("stop-time-us", stopTimeUs);
+ err = mEncoder->setParameters(params);
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
case kWhatGetFirstSampleSystemTimeUs:
{
sp<AReplyToken> replyID;
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index a0ddc28..2f457ac 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -76,11 +76,13 @@
mInitCheck(UNKNOWN_ERROR),
mExecuting(false),
mSuspended(false),
+ mStopTimeUs(-1),
mLastDataSpace(HAL_DATASPACE_UNKNOWN),
mNumFramesAvailable(0),
mNumBufferAcquired(0),
mEndOfStream(false),
mEndOfStreamSent(false),
+ mLastActionTimeUs(-1ll),
mPrevOriginalTimeUs(-1ll),
mSkipFramesBeforeNs(-1ll),
mRepeatAfterUs(-1ll),
@@ -171,7 +173,7 @@
// If EOS has already been signaled, and there are no more frames to
// submit, try to send EOS now as well.
- if (mEndOfStream && mNumFramesAvailable == 0) {
+ if (mStopTimeUs == -1 && mEndOfStream && mNumFramesAvailable == 0) {
submitEndOfInputStream_l();
}
@@ -348,8 +350,8 @@
ALOGV("buffer freed, %zu frames avail (eos=%d)",
mNumFramesAvailable, mEndOfStream);
fillCodecBuffer_l();
- } else if (mEndOfStream) {
- // No frames available, but EOS is pending, so use this buffer to
+ } else if (mEndOfStream && mStopTimeUs == -1) {
+ // No frames available, but EOS is pending and no stop time, so use this buffer to
// send that.
ALOGV("buffer freed, EOS pending");
submitEndOfInputStream_l();
@@ -387,7 +389,7 @@
bool GraphicBufferSource::fillCodecBuffer_l() {
CHECK(mExecuting && mNumFramesAvailable > 0);
- if (mSuspended) {
+ if (mSuspended && mActionQueue.empty()) {
return false;
}
@@ -408,8 +410,85 @@
return false;
}
+ int64_t itemTimeUs = item.mTimestamp / 1000;
+
mNumFramesAvailable--;
+ // Process ActionItems in the queue, if there are any. If a buffer's timestamp
+ // is smaller than the first action's timestamp, no action needs to be performed.
+ // If the buffer's timestamp is larger than or equal to the last action's
+ // timestamp, only the last action needs to be performed, as all the actions
+ // before it are overridden by the last action. For the other cases, traverse
+ // the queue to find the newest action with timestamp smaller than or equal to
+ // the buffer's timestamp. For example, given an action queue like
+ // [pause 1us], [resume 2us], [pause 3us], [resume 4us], [pause 5us]..., upon
+ // receiving a buffer with timestamp 3.5us, only the action [pause 3us] needs
+ // to be handled, and [pause 1us], [resume 2us] will be discarded.
+ bool dropped = false;
+ bool done = false;
+ if (!mActionQueue.empty()) {
+ // First scan to check if bufferTimestamp is smaller than first action's timestamp.
+ ActionItem nextAction = *(mActionQueue.begin());
+ if (itemTimeUs < nextAction.mActionTimeUs) {
+ ALOGV("No action. buffer timestamp %lld us < action timestamp: %lld us",
+ (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+ // All the actions are ahead. No action need to perform now.
+ // Release the buffer if is in suspended state, or process the buffer
+ // if not in suspended state.
+ dropped = mSuspended;
+ done = true;
+ }
+
+ if (!done) {
+ List<ActionItem>::iterator it = mActionQueue.begin();
+ while(it != mActionQueue.end()) {
+ nextAction = *it;
+ mActionQueue.erase(it);
+ if (nextAction.mActionTimeUs > itemTimeUs) {
+ break;
+ }
+ ++it;
+ }
+
+ CHECK(itemTimeUs >= nextAction.mActionTimeUs);
+ switch (nextAction.mAction) {
+ case ActionItem::PAUSE:
+ {
+ mSuspended = true;
+ dropped = true;
+ ALOGV("RUNNING/PAUSE -> PAUSE at buffer %lld us PAUSE Time: %lld us",
+ (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+ break;
+ }
+ case ActionItem::RESUME:
+ {
+ mSuspended = false;
+ ALOGV("PAUSE/RUNNING -> RUNNING at buffer %lld us RESUME Time: %lld us",
+ (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+ break;
+ }
+ case ActionItem::STOP:
+ {
+ ALOGV("RUNNING/PAUSE -> STOP at buffer %lld us STOP Time: %lld us",
+ (long long)itemTimeUs, (long long)nextAction.mActionTimeUs);
+ dropped = true;
+ // Clear the whole ActionQueue as recording is done
+ mActionQueue.clear();
+ submitEndOfInputStream_l();
+ break;
+ }
+ default:
+ ALOGE("Unknown action type");
+ return false;
+ }
+ }
+ }
+
+ if (dropped) {
+ releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
+ return true;
+ }
+
if (item.mDataSpace != mLastDataSpace) {
onDataSpaceChanged_l(
item.mDataSpace, (android_pixel_format)mBufferSlot[item.mSlot]->getPixelFormat());
@@ -419,7 +498,6 @@
// only submit sample if start time is unspecified, or sample
// is queued after the specified start time
- bool dropped = false;
if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
// if start time is set, offset time stamp by start time
if (mSkipFramesBeforeNs > 0) {
@@ -719,12 +797,12 @@
ALOGV("onFrameAvailable exec=%d avail=%zu",
mExecuting, mNumFramesAvailable);
- if (mOMXNode == NULL || mEndOfStream || mSuspended) {
- if (mEndOfStream) {
+ if (mOMXNode == NULL || mEndOfStreamSent || (mSuspended && mActionQueue.empty())) {
+ if (mEndOfStreamSent) {
// This should only be possible if a new buffer was queued after
// EOS was signaled, i.e. the app is misbehaving.
- ALOGW("onFrameAvailable: EOS is set, ignoring frame");
+ ALOGW("onFrameAvailable: EOS is sent, ignoring frame");
} else {
ALOGV("onFrameAvailable: suspended, ignoring frame");
}
@@ -875,44 +953,74 @@
mPrevCaptureUs = -1ll;
mPrevFrameUs = -1ll;
mInputBufferTimeOffsetUs = 0;
+ mStopTimeUs = -1;
+ mActionQueue.clear();
}
return Status::ok();
}
-Status GraphicBufferSource::setSuspend(bool suspend) {
- ALOGV("setSuspend=%d", suspend);
+Status GraphicBufferSource::setSuspend(bool suspend, int64_t suspendStartTimeUs) {
+ ALOGV("setSuspend=%d at time %lld us", suspend, (long long)suspendStartTimeUs);
Mutex::Autolock autoLock(mMutex);
- if (suspend) {
- mSuspended = true;
-
- while (mNumFramesAvailable > 0) {
- BufferItem item;
- status_t err = acquireBuffer(&item);
-
- if (err != OK) {
- ALOGE("setSuspend: acquireBuffer returned err=%d", err);
- break;
- }
-
- --mNumFramesAvailable;
-
- releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
- }
- return Status::ok();
+ if (mStopTimeUs != -1) {
+ ALOGE("setSuspend failed as STOP action is pending");
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
}
- mSuspended = false;
+ // Push the action to the queue.
+ if (suspendStartTimeUs != -1) {
+ // suspendStartTimeUs must be smaller or equal to current systemTime.
+ int64_t currentSystemTimeUs = systemTime() / 1000;
+ if (suspendStartTimeUs > currentSystemTimeUs) {
+ ALOGE("setSuspend failed. %lld is larger than current system time %lld us",
+ (long long)suspendStartTimeUs, (long long)currentSystemTimeUs);
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
+ }
+ if (mLastActionTimeUs != -1 && suspendStartTimeUs < mLastActionTimeUs) {
+ ALOGE("setSuspend failed. %lld is smaller than last action time %lld us",
+ (long long)suspendStartTimeUs, (long long)mLastActionTimeUs);
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
+ }
+ mLastActionTimeUs = suspendStartTimeUs;
+ ActionItem action;
+ action.mAction = suspend ? ActionItem::PAUSE : ActionItem::RESUME;
+ action.mActionTimeUs = suspendStartTimeUs;
+ ALOGV("Push %s action into actionQueue", suspend ? "PAUSE" : "RESUME");
+ mActionQueue.push_back(action);
+ } else {
+ if (suspend) {
+ mSuspended = true;
- if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
- if (repeatLatestBuffer_l()) {
- ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
+ while (mNumFramesAvailable > 0) {
+ BufferItem item;
+ status_t err = acquireBuffer(&item);
- mRepeatBufferDeferred = false;
+ if (err != OK) {
+ ALOGE("setSuspend: acquireBuffer returned err=%d", err);
+ break;
+ }
+
+ --mNumFramesAvailable;
+
+ releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence);
+ }
+ return Status::ok();
} else {
- ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
+
+ mSuspended = false;
+
+ if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) {
+ if (repeatLatestBuffer_l()) {
+ ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");
+
+ mRepeatBufferDeferred = false;
+ } else {
+ ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");
+ }
+ }
}
}
return Status::ok();
@@ -973,6 +1081,36 @@
return Status::ok();
}
+Status GraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ ALOGV("setStopTimeUs: %lld us", (long long)stopTimeUs);
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mStopTimeUs != -1) {
+ // Ignore if stop time has already been set
+ return Status::ok();
+ }
+
+ // stopTimeUs must be smaller or equal to current systemTime.
+ int64_t currentSystemTimeUs = systemTime() / 1000;
+ if (stopTimeUs > currentSystemTimeUs) {
+ ALOGE("setStopTimeUs failed. %lld is larger than current system time %lld us",
+ (long long)stopTimeUs, (long long)currentSystemTimeUs);
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
+ }
+ if (mLastActionTimeUs != -1 && stopTimeUs < mLastActionTimeUs) {
+ ALOGE("setStopTimeUs failed. %lld is smaller than last action time %lld us",
+ (long long)stopTimeUs, (long long)mLastActionTimeUs);
+ return Status::fromServiceSpecificError(INVALID_OPERATION);
+ }
+ mLastActionTimeUs = stopTimeUs;
+ ActionItem action;
+ action.mAction = ActionItem::STOP;
+ action.mActionTimeUs = stopTimeUs;
+ mActionQueue.push_back(action);
+ mStopTimeUs = stopTimeUs;
+ return Status::ok();
+}
+
Status GraphicBufferSource::setTimeLapseConfig(int64_t timePerFrameUs, int64_t timePerCaptureUs) {
ALOGV("setTimeLapseConfig: timePerFrameUs=%lld, timePerCaptureUs=%lld",
(long long)timePerFrameUs, (long long)timePerCaptureUs);
@@ -1013,15 +1151,15 @@
// Set the end-of-stream flag. If no frames are pending from the
// BufferQueue, and a codec buffer is available, and we're executing,
- // we initiate the EOS from here. Otherwise, we'll let
- // codecBufferEmptied() (or omxExecuting) do it.
+ // and there is no stop timestamp, we initiate the EOS from here.
+ // Otherwise, we'll let codecBufferEmptied() (or omxExecuting) do it.
//
// Note: if there are no pending frames and all codec buffers are
// available, we *must* submit the EOS from here or we'll just
// stall since no future events are expected.
mEndOfStream = true;
- if (mExecuting && mNumFramesAvailable == 0) {
+ if (mStopTimeUs == -1 && mExecuting && mNumFramesAvailable == 0) {
submitEndOfInputStream_l();
}
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 153a035..475548e 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -101,14 +101,18 @@
// data space.
Status configure(const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
- // This is called after the last input frame has been submitted. We
- // need to submit an empty buffer with the EOS flag set. If we don't
- // have a codec buffer ready, we just set the mEndOfStream flag.
+ // This is called after the last input frame has been submitted or buffer
+ // timestamp is greater than or equal to stopTimeUs. We need to submit an empty
+ // buffer with the EOS flag set. If we don't have a codec buffer ready,
+ // we just set the mEndOfStream flag.
Status signalEndOfInputStream() override;
// If suspend is true, all incoming buffers (including those currently
- // in the BufferQueue) will be discarded until the suspension is lifted.
- Status setSuspend(bool suspend) override;
+ // in the BufferQueue) with timestamp larger than timeUs will be discarded
+ // until the suspension is lifted. If suspend is false, all incoming buffers
+ // (including those currently in the BufferQueue) with timestamp larger than
+ // timeUs will be processed. timeUs uses SYSTEM_TIME_MONOTONIC time base.
+ Status setSuspend(bool suspend, int64_t timeUs) override;
// Specifies the interval after which we requeue the buffer previously
// queued to the encoder. This is useful in the case of surface flinger
@@ -135,6 +139,10 @@
// be dropped and not submitted to encoder
Status setStartTimeUs(int64_t startTimeUs) override;
+ // Sets the stop time us (in system time), samples after which should be dropped
+ // and not submitted to encoder. timeUs uses SYSTEM_TIME_MONOTONIC time base.
+ Status setStopTimeUs(int64_t stopTimeUs) override;
+
// Sets the desired color aspects, e.g. to be used when producer does not specify a dataspace.
Status setColorAspects(int32_t aspectsPacked) override;
@@ -229,6 +237,9 @@
bool mSuspended;
+ // The time to stop sending buffers.
+ int64_t mStopTimeUs;
+
// Last dataspace seen
android_dataspace mLastDataSpace;
@@ -259,6 +270,25 @@
// Tracks codec buffers.
Vector<CodecBuffer> mCodecBuffers;
+ struct ActionItem {
+ typedef enum {
+ PAUSE,
+ RESUME,
+ STOP
+ } ActionType;
+ ActionType mAction;
+ int64_t mActionTimeUs;
+ };
+
+ // Maintain last action timestamp to ensure all the action timestamps are
+ // monotonically increasing.
+ int64_t mLastActionTimeUs;
+
+ // An action queue that queue up all the actions sent to GraphicBufferSource.
+ // STOP action should only show up at the end of the list as all the actions
+ // after a STOP action will be discarded. mActionQueue is protected by mMutex.
+ List<ActionItem> mActionQueue;
+
////
friend struct AHandlerReflector<GraphicBufferSource>;
diff --git a/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.cpp b/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.cpp
index 8ba2924..af9cf03 100644
--- a/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.cpp
@@ -39,8 +39,9 @@
new TWOmxNode(omxNode), toHardwareDataspace(dataSpace)));
}
-::android::binder::Status LWGraphicBufferSource::setSuspend(bool suspend) {
- return toBinderStatus(mBase->setSuspend(suspend));
+::android::binder::Status LWGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return toBinderStatus(mBase->setSuspend(suspend, timeUs));
}
::android::binder::Status LWGraphicBufferSource::setRepeatPreviousFrameDelayUs(
@@ -63,6 +64,11 @@
return toBinderStatus(mBase->setStartTimeUs(startTimeUs));
}
+::android::binder::Status LWGraphicBufferSource::setStopTimeUs(
+ int64_t stopTimeUs) {
+ return toBinderStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
::android::binder::Status LWGraphicBufferSource::setColorAspects(
int32_t aspects) {
return toBinderStatus(mBase->setColorAspects(
@@ -89,8 +95,9 @@
return Void();
}
-Return<void> TWGraphicBufferSource::setSuspend(bool suspend) {
- mBase->setSuspend(suspend);
+Return<void> TWGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ mBase->setSuspend(suspend, timeUs);
return Void();
}
@@ -116,6 +123,10 @@
return Void();
}
+Return<void> TWGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ return toHardwareStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
Return<void> TWGraphicBufferSource::setColorAspects(
const ColorAspects& aspects) {
mBase->setColorAspects(toCompactColorAspects(aspects));
diff --git a/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.h b/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.h
index 69efdde..dd6168e 100644
--- a/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.h
+++ b/media/libstagefright/omx/hal/1.0/impl/WGraphicBufferSource.h
@@ -69,13 +69,14 @@
LWGraphicBufferSource(sp<TGraphicBufferSource> const& base);
::android::binder::Status configure(
const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
- ::android::binder::Status setSuspend(bool suspend) override;
+ ::android::binder::Status setSuspend(bool suspend, int64_t timeUs) override;
::android::binder::Status setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) override;
::android::binder::Status setMaxFps(float maxFps) override;
::android::binder::Status setTimeLapseConfig(
int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
::android::binder::Status setStartTimeUs(int64_t startTimeUs) override;
+ ::android::binder::Status setStopTimeUs(int64_t stopTimeUs) override;
::android::binder::Status setColorAspects(int32_t aspects) override;
::android::binder::Status setTimeOffsetUs(int64_t timeOffsetsUs) override;
::android::binder::Status signalEndOfInputStream() override;
@@ -86,12 +87,13 @@
TWGraphicBufferSource(sp<LGraphicBufferSource> const& base);
Return<void> configure(
const sp<IOmxNode>& omxNode, Dataspace dataspace) override;
- Return<void> setSuspend(bool suspend) override;
+ Return<void> setSuspend(bool suspend, int64_t timeUs) override;
Return<void> setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
Return<void> setMaxFps(float maxFps) override;
Return<void> setTimeLapseConfig(
int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
Return<void> setStartTimeUs(int64_t startTimeUs) override;
+ Return<void> setStopTimeUs(int64_t stopTimeUs) override;
Return<void> setColorAspects(const ColorAspects& aspects) override;
Return<void> setTimeOffsetUs(int64_t timeOffsetUs) override;
Return<void> signalEndOfInputStream() override;
diff --git a/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.cpp b/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.cpp
index a23b48a..afe8bc5 100644
--- a/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.cpp
+++ b/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.cpp
@@ -39,8 +39,9 @@
new TWOmxNode(omxNode), toHardwareDataspace(dataSpace)));
}
-::android::binder::Status LWGraphicBufferSource::setSuspend(bool suspend) {
- return toBinderStatus(mBase->setSuspend(suspend));
+::android::binder::Status LWGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return toBinderStatus(mBase->setSuspend(suspend, timeUs));
}
::android::binder::Status LWGraphicBufferSource::setRepeatPreviousFrameDelayUs(
@@ -63,6 +64,11 @@
return toBinderStatus(mBase->setStartTimeUs(startTimeUs));
}
+::android::binder::Status LWGraphicBufferSource::setStopTimeUs(
+ int64_t stopTimeUs) {
+ return toBinderStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
::android::binder::Status LWGraphicBufferSource::setColorAspects(
int32_t aspects) {
return toBinderStatus(mBase->setColorAspects(
@@ -89,8 +95,8 @@
return Void();
}
-Return<void> TWGraphicBufferSource::setSuspend(bool suspend) {
- mBase->setSuspend(suspend);
+Return<void> TWGraphicBufferSource::setSuspend(bool suspend, int64_t timeUs) {
+ mBase->setSuspend(suspend, timeUs);
return Void();
}
@@ -116,6 +122,10 @@
return Void();
}
+Return<void> TWGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ return toHardwareStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
Return<void> TWGraphicBufferSource::setColorAspects(
const ColorAspects& aspects) {
mBase->setColorAspects(toCompactColorAspects(aspects));
diff --git a/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.h b/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.h
index d21de42..1b09cbd 100644
--- a/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.h
+++ b/media/libstagefright/omx/hal/1.0/utils/WGraphicBufferSource.h
@@ -69,13 +69,14 @@
LWGraphicBufferSource(sp<TGraphicBufferSource> const& base);
::android::binder::Status configure(
const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
- ::android::binder::Status setSuspend(bool suspend) override;
+ ::android::binder::Status setSuspend(bool suspend, int64_t timeUs) override;
::android::binder::Status setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) override;
::android::binder::Status setMaxFps(float maxFps) override;
::android::binder::Status setTimeLapseConfig(
int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
::android::binder::Status setStartTimeUs(int64_t startTimeUs) override;
+ ::android::binder::Status setStopTimeUs(int64_t stopTimeUs) override;
::android::binder::Status setColorAspects(int32_t aspects) override;
::android::binder::Status setTimeOffsetUs(int64_t timeOffsetsUs) override;
::android::binder::Status signalEndOfInputStream() override;
@@ -86,12 +87,13 @@
TWGraphicBufferSource(sp<LGraphicBufferSource> const& base);
Return<void> configure(
const sp<IOmxNode>& omxNode, Dataspace dataspace) override;
- Return<void> setSuspend(bool suspend) override;
+ Return<void> setSuspend(bool suspend, int64_t timeUs) override;
Return<void> setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
Return<void> setMaxFps(float maxFps) override;
Return<void> setTimeLapseConfig(
int64_t timePerFrameUs, int64_t timePerCaptureUs) override;
Return<void> setStartTimeUs(int64_t startTimeUs) override;
+ Return<void> setStopTimeUs(int64_t stopTimeUs) override;
Return<void> setColorAspects(const ColorAspects& aspects) override;
Return<void> setTimeOffsetUs(int64_t timeOffsetUs) override;
Return<void> signalEndOfInputStream() override;