Merge "Fix AudioTrack flush pointer wrap" into mnc-dev
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index ffe974b..d2dc200 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -48,7 +48,8 @@
GET_CAMERA_INFO,
WAIT_UNTIL_IDLE,
FLUSH,
- PREPARE
+ PREPARE,
+ TEAR_DOWN
};
namespace {
@@ -365,6 +366,20 @@
return reply.readInt32();
}
+ virtual status_t tearDown(int streamId)
+ {
+ ALOGV("tearDown");
+ Parcel data, reply;
+
+ data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+ data.writeInt32(streamId);
+
+ remote()->transact(TEAR_DOWN, data, &reply);
+
+ reply.readExceptionCode();
+ return reply.readInt32();
+ }
+
private:
@@ -570,6 +585,13 @@
reply->writeInt32(prepare(streamId));
return NO_ERROR;
} break;
+ case TEAR_DOWN: {
+ CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+ int streamId = data.readInt32();
+ reply->writeNoException();
+ reply->writeInt32(tearDown(streamId));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index b3dd140..a7bf8ab 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -138,6 +138,12 @@
* Preallocate buffers for a given output stream asynchronously.
*/
virtual status_t prepare(int streamId) = 0;
+
+ /**
+ * Free all unused buffers for a given output stream.
+ */
+ virtual status_t tearDown(int streamId) = 0;
+
};
// ----------------------------------------------------------------------------
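For context, a minimal caller-side sketch (hypothetical helper, not part of this change) of how the new tearDown() transaction pairs with prepare():

    #include <camera/camera2/ICameraDeviceUser.h>

    using namespace android;

    // Hypothetical helper: release the buffers an output stream is no longer
    // using. prepare() preallocates buffers asynchronously; tearDown() is its
    // counterpart and frees whatever the stream does not currently need.
    status_t releaseUnusedStreamBuffers(
            const sp<ICameraDeviceUser> &device, int streamId) {
        return device->tearDown(streamId);
    }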
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index a4b24d7..2ca3f1c 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -215,6 +215,7 @@
sp<MemoryDealer> mDealer[2];
sp<ANativeWindow> mNativeWindow;
+ int mNativeWindowUsageBits;
sp<AMessage> mInputFormat;
sp<AMessage> mOutputFormat;
sp<AMessage> mBaseOutputFormat;
@@ -266,7 +267,8 @@
status_t freeBuffer(OMX_U32 portIndex, size_t i);
status_t handleSetSurface(const sp<Surface> &surface);
- status_t setupNativeWindowSizeFormatAndUsage(ANativeWindow *nativeWindow /* nonnull */);
+ status_t setupNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */);
status_t configureOutputBuffersFromNativeWindow(
OMX_U32 *nBufferCount, OMX_U32 *nBufferSize,
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
index 1eef211..4b5cd05 100644
--- a/include/media/stagefright/MediaSync.h
+++ b/include/media/stagefright/MediaSync.h
@@ -37,6 +37,7 @@
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;
+struct VideoFrameScheduler;
// MediaSync manages media playback and its synchronization to a media clock
// source. It can be also used for video-only playback.
@@ -103,6 +104,9 @@
// MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
sp<const MediaClock> getMediaClock();
+ // Flush MediaSync: drop pending video frames and reset timing state
+ void flush();
+
// Set the video frame rate hint - this is used by the video FrameScheduler
status_t setVideoFrameRateHint(float rate);
@@ -131,11 +135,10 @@
private:
enum {
- kWhatDrainVideo = 'dVid',
+ kWhatDrainVideo = 'dVid',
+ kWhatCheckFrameAvailable = 'cFrA',
};
- static const int MAX_OUTSTANDING_BUFFERS = 2;
-
// This is a thin wrapper class that lets us listen to
// IConsumerListener::onFrameAvailable from mInput.
class InputListener : public BnConsumerListener,
@@ -194,6 +197,8 @@
sp<IGraphicBufferConsumer> mInput;
sp<IGraphicBufferProducer> mOutput;
int mUsageFlagsFromOutput;
+ uint32_t mMaxAcquiredBufferCount; // max buffers the consumer may acquire at once
+ bool mReturnPendingInputFrame; // set by flush(); return (not queue) a frame acquired while waiting
sp<AudioTrack> mAudioTrack;
uint32_t mNativeSampleRateInHz;
@@ -202,6 +207,7 @@
int64_t mNextBufferItemMediaUs;
List<BufferItem> mBufferItems;
+ sp<VideoFrameScheduler> mFrameScheduler;
// Keep track of buffers received from |mInput|. This is needed because
// it's possible the consumer of |mOutput| could return a different
@@ -242,8 +248,9 @@
// onBufferReleasedByOutput releases a buffer back to the input.
void onFrameAvailableFromInput();
- // Send |bufferItem| to the output for rendering.
- void renderOneBufferItem_l(const BufferItem &bufferItem);
+ // Send |bufferItem| to the output for rendering. If this is not the only
+ // buffer sent for rendering, check for any dropped frames in |checkInUs| us.
+ void renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs);
// This implements the onBufferReleased callback from IProducerListener.
// It gets called from an OutputListener.
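A sketch of how a client of MediaSync might use the new flush() entry point around a reposition; the helper and its name are assumptions, and the behavior described follows the MediaSync.cpp changes later in this diff:

    #include <media/stagefright/MediaSync.h>

    using namespace android;

    // Hypothetical reposition helper: drop queued video frames so stale buffers
    // are not rendered against the new position.
    void repositionSync(const sp<MediaSync> &sync) {
        // flush() returns pending input buffers, clears the media clock anchor
        // and restarts the VideoFrameScheduler phase estimate.
        sync->flush();
        // ... reposition the sources and resume queueing frames afterwards ...
    }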
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/include/media/stagefright/VideoFrameScheduler.h
similarity index 93%
rename from media/libmediaplayerservice/VideoFrameScheduler.h
rename to include/media/stagefright/VideoFrameScheduler.h
index b1765c9..9d97dfd 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.h
+++ b/include/media/stagefright/VideoFrameScheduler.h
@@ -39,6 +39,9 @@
// returns the vsync period for the main display
nsecs_t getVsyncPeriod();
+ // returns the current frames-per-second, or 0.f if not primed
+ float getFrameRate();
+
void release();
static const size_t kHistorySize = 8;
@@ -54,8 +57,9 @@
void reset(float fps = -1);
// keep current estimate, but restart phase
void restart();
- // returns period
+ // returns period or 0 if not yet primed
nsecs_t addSample(nsecs_t time);
+ nsecs_t getPeriod() const;
private:
nsecs_t mPeriod;
diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h
index 24df85a..65f415a 100644
--- a/include/media/stagefright/foundation/ADebug.h
+++ b/include/media/stagefright/foundation/ADebug.h
@@ -117,7 +117,7 @@
};
- // parse the property or string to get the debug level for a component name
+ // parse the property or string to get a long-type level for a component name
// string format is:
// <level>[:<glob>][,<level>[:<glob>]...]
// - <level> is 0-5 corresponding to ADebug::Level
@@ -125,10 +125,14 @@
// matches all components
// - string is read left-to-right, and the last matching level is returned, or
// the def if no terms matched
+ static long GetLevelFromSettingsString(
+ const char *name, const char *value, long def);
+ static long GetLevelFromProperty(
+ const char *name, const char *value, long def);
+
+ // same for ADebug::Level - performs clamping to valid debug ranges
static Level GetDebugLevelFromProperty(
const char *name, const char *propertyName, Level def = kDebugNone);
- static Level GetDebugLevelFromString(
- const char *name, const char *value, Level def = kDebugNone);
// remove redundant segments of a codec name, and return a newly allocated
// string suitable for debugging
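To make the settings-string format concrete, a small sketch with made-up component names and values; the expected results in the comments assume the ad-hoc glob matching described above:

    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    void settingsStringExamples() {
        // "3" has no glob, so it applies to every component name.
        long a = ADebug::GetLevelFromSettingsString("OMX.google.h264.decoder", "3", 0);  // 3
        // Last matching term wins: every component gets 1, video components get 4.
        long b = ADebug::GetLevelFromSettingsString("video.decoder", "1,4:*video*", 0);  // 4
        // No term matches, so the supplied default is returned.
        long c = ADebug::GetLevelFromSettingsString("audio.decoder", "4:*video*", 2);    // 2
        (void)a; (void)b; (void)c;
    }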
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 81a5e8c..c215abf 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -260,9 +260,10 @@
status_t MediaPlayer::prepareAsync_l()
{
if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
- mPlayer->setAudioStreamType(mStreamType);
if (mAudioAttributesParcel != NULL) {
mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
+ } else {
+ mPlayer->setAudioStreamType(mStreamType);
}
mCurrentState = MEDIA_PLAYER_PREPARING;
return mPlayer->prepareAsync();
@@ -734,24 +735,28 @@
status_t MediaPlayer::setParameter(int key, const Parcel& request)
{
ALOGV("MediaPlayer::setParameter(%d)", key);
+ status_t status = INVALID_OPERATION;
Mutex::Autolock _l(mLock);
if (checkStateForKeySet_l(key) != OK) {
- return INVALID_OPERATION;
- }
- if (mPlayer != NULL) {
- return mPlayer->setParameter(key, request);
+ return status;
}
switch (key) {
case KEY_PARAMETER_AUDIO_ATTRIBUTES:
- // no player, save the marshalled audio attributes
+ // save the marshalled audio attributes
if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; };
mAudioAttributesParcel = new Parcel();
mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize());
- return OK;
+ status = OK;
+ break;
default:
- ALOGV("setParameter: no active player");
- return INVALID_OPERATION;
+ ALOGV_IF(mPlayer == NULL, "setParameter: no active player");
+ break;
}
+
+ if (mPlayer != NULL) {
+ status = mPlayer->setParameter(key, request);
+ }
+ return status;
}
status_t MediaPlayer::getParameter(int key, Parcel *reply)
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 7f0cca2..4d1b587 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -21,7 +21,6 @@
StagefrightPlayer.cpp \
StagefrightRecorder.cpp \
TestPlayerStub.cpp \
- VideoFrameScheduler.cpp \
LOCAL_SHARED_LIBRARIES := \
libbinder \
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 0ecfb1e..abbbc20 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -45,6 +45,7 @@
#include <utils/Timers.h>
#include <utils/Vector.h>
+#include <media/AudioPolicyHelper.h>
#include <media/IMediaHTTPService.h>
#include <media/IRemoteDisplay.h>
#include <media/IRemoteDisplayClient.h>
@@ -1351,6 +1352,10 @@
mFlags(AUDIO_OUTPUT_FLAG_NONE)
{
ALOGV("AudioOutput(%d)", sessionId);
+ if (attr != NULL) {
+ mStreamType = audio_attributes_to_stream_type(attr);
+ }
+
setMinBufferCount();
}
@@ -1464,6 +1469,17 @@
void MediaPlayerService::AudioOutput::setAudioAttributes(const audio_attributes_t * attributes) {
Mutex::Autolock lock(mLock);
mAttributes = attributes;
+ if (attributes != NULL) {
+ mStreamType = audio_attributes_to_stream_type(attributes);
+ }
+}
+
+void MediaPlayerService::AudioOutput::setAudioStreamType(audio_stream_type_t streamType)
+{
+ // do not allow direct stream type modification if attributes have been set
+ if (mAttributes == NULL) {
+ mStreamType = streamType;
+ }
}
void MediaPlayerService::AudioOutput::deleteRecycledTrack_l()
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 9e6ca52..1c32597 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -107,8 +107,7 @@
virtual void flush();
virtual void pause();
virtual void close();
- void setAudioStreamType(audio_stream_type_t streamType) {
- mStreamType = streamType; }
+ void setAudioStreamType(audio_stream_type_t streamType);
virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
void setAudioAttributes(const audio_attributes_t * attributes);
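A sketch of the mapping the AudioOutput changes rely on: once attributes are set, the stream type is derived from them via audio_attributes_to_stream_type(), and later setAudioStreamType() calls are ignored. The attribute values below are made up for illustration:

    #include <string.h>
    #include <media/AudioPolicyHelper.h>
    #include <system/audio.h>

    // Illustrative only: AUDIO_USAGE_MEDIA maps to AUDIO_STREAM_MUSIC, which is
    // what AudioOutput will now report even if a different stream type is
    // passed to setAudioStreamType() afterwards.
    audio_stream_type_t streamTypeForMusicAttributes() {
        audio_attributes_t attr;
        memset(&attr, 0, sizeof(attr));
        attr.usage = AUDIO_USAGE_MEDIA;
        attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
        return audio_attributes_to_stream_type(&attr);
    }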
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index 20193c3..cc6f743 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -25,7 +25,8 @@
$(TOP)/frameworks/av/media/libmediaplayerservice \
$(TOP)/frameworks/native/include/media/openmax
-LOCAL_CFLAGS += -Werror -Wall
+LOCAL_CFLAGS += -Werror -Wall -DENABLE_STAGEFRIGHT_EXPERIMENTS
+
LOCAL_CLANG := true
LOCAL_MODULE:= libstagefright_nuplayer
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 64d172e..7dc9be7 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -1005,7 +1005,9 @@
status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
- if (!track->mPackets->hasBufferAvailable(&finalResult)) {
+ // start pulling in more buffers if we only have one (or no) buffer left
+ // so that the decoder has less chance of being starved
+ if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
}
@@ -1458,6 +1460,8 @@
track = &mVideoTrack;
if (mIsWidevine) {
maxBuffers = 2;
+ } else {
+ maxBuffers = 4;
}
break;
case MEDIA_TRACK_TYPE_AUDIO:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index a9d8904..2fdc196 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -623,12 +623,19 @@
case kWhatSetVideoSurface:
{
- ALOGV("kWhatSetVideoSurface");
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
sp<Surface> surface = static_cast<Surface *>(obj.get());
- if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL) {
+
+ ALOGD("onSetVideoSurface(%p, %s video decoder)",
+ surface.get(),
+ (mSource != NULL && mSource->getFormat(false /* audio */) != NULL
+ && mVideoDecoder != NULL) ? "have" : "no");
+
+ if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL
+ // NOTE: mVideoDecoder's mSurface is always non-null
+ || (mVideoDecoder != NULL && mVideoDecoder->setVideoSurface(surface) == OK)) {
performSetSurface(surface);
break;
}
@@ -639,7 +646,7 @@
mDeferredActions.push_back(new SetSurfaceAction(surface));
- if (obj != NULL) {
+ if (obj != NULL || mAudioDecoder != NULL) {
if (mStarted) {
// Issue a seek to refresh the video screen only if started otherwise
// the extractor may not yet be started and will assert.
@@ -830,20 +837,6 @@
// Don't try to re-open audio sink if there's an existing decoder.
if (mAudioSink != NULL && mAudioDecoder == NULL) {
- sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
- sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
- audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
- const bool hasVideo = (videoFormat != NULL);
- const bool canOffload = canOffloadStream(
- audioMeta, hasVideo, mSource->isStreaming(), streamType);
- if (canOffload) {
- if (!mOffloadAudio) {
- mRenderer->signalEnableOffloadAudio();
- }
- // open audio sink early under offload mode.
- sp<AMessage> format = mSource->getFormat(true /*audio*/);
- tryOpenAudioSinkForOffload(format, hasVideo);
- }
instantiateDecoder(true, &mAudioDecoder);
}
@@ -1078,6 +1071,11 @@
CHECK(msg->findInt32("audio", &audio));
ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+ if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
+ || mFlushingAudio == SHUT_DOWN)) {
+ // Flush has been handled by tear down.
+ break;
+ }
handleFlushComplete(audio, false /* isDecoder */);
finishFlushIfPossible();
} else if (what == Renderer::kWhatVideoRenderingStart) {
@@ -1086,14 +1084,27 @@
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
} else if (what == Renderer::kWhatAudioTearDown) {
- int64_t positionUs;
- CHECK(msg->findInt64("positionUs", &positionUs));
int32_t reason;
CHECK(msg->findInt32("reason", &reason));
ALOGV("Tear down audio with reason %d.", reason);
- closeAudioSink();
mAudioDecoder.clear();
++mAudioDecoderGeneration;
+ bool needsToCreateAudioDecoder = true;
+ if (mFlushingAudio == FLUSHING_DECODER) {
+ mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+ mFlushingAudio = FLUSHED;
+ finishFlushIfPossible();
+ } else if (mFlushingAudio == FLUSHING_DECODER_SHUTDOWN
+ || mFlushingAudio == SHUTTING_DOWN_DECODER) {
+ mFlushComplete[1 /* audio */][1 /* isDecoder */] = true;
+ mFlushingAudio = SHUT_DOWN;
+ finishFlushIfPossible();
+ needsToCreateAudioDecoder = false;
+ }
+ if (mRenderer == NULL) {
+ break;
+ }
+ closeAudioSink();
mRenderer->flush(
true /* audio */, false /* notifyComplete */);
if (mVideoDecoder != NULL) {
@@ -1101,23 +1112,11 @@
false /* audio */, false /* notifyComplete */);
}
+ int64_t positionUs;
+ CHECK(msg->findInt64("positionUs", &positionUs));
performSeek(positionUs);
- if (reason == Renderer::kDueToError) {
- sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
- sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
- audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
- const bool hasVideo = (videoFormat != NULL);
- const bool canOffload = canOffloadStream(
- audioMeta, hasVideo, mSource->isStreaming(), streamType);
- if (canOffload) {
- mRenderer->signalEnableOffloadAudio();
- sp<AMessage> format = mSource->getFormat(true /*audio*/);
- tryOpenAudioSinkForOffload(format, hasVideo);
- } else {
- mRenderer->signalDisableOffloadAudio();
- mOffloadAudio = false;
- }
+ if (reason == Renderer::kDueToError && needsToCreateAudioDecoder) {
instantiateDecoder(true /* audio */, &mAudioDecoder);
}
}
@@ -1449,6 +1448,36 @@
mRenderer->closeAudioSink();
}
+void NuPlayer::determineAudioModeChange() {
+ if (mSource == NULL || mAudioSink == NULL) {
+ return;
+ }
+
+ if (mRenderer == NULL) {
+ ALOGW("No renderer can be used to determine audio mode. Use non-offload for safety.");
+ mOffloadAudio = false;
+ return;
+ }
+
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+ audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+ const bool hasVideo = (videoFormat != NULL);
+ const bool canOffload = canOffloadStream(
+ audioMeta, hasVideo, mSource->isStreaming(), streamType);
+ if (canOffload) {
+ if (!mOffloadAudio) {
+ mRenderer->signalEnableOffloadAudio();
+ }
+ // open audio sink early under offload mode.
+ sp<AMessage> format = mSource->getFormat(true /*audio*/);
+ tryOpenAudioSinkForOffload(format, hasVideo);
+ } else {
+ mRenderer->signalDisableOffloadAudio();
+ mOffloadAudio = false;
+ }
+}
+
status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {
if (*decoder != NULL) {
return OK;
@@ -1490,6 +1519,7 @@
++mAudioDecoderGeneration;
notify->setInt32("generation", mAudioDecoderGeneration);
+ determineAudioModeChange();
if (mOffloadAudio) {
const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
format->setInt32("has-video", hasVideo);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 38904bc..9f4c462 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -220,6 +220,7 @@
void tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo);
void closeAudioSink();
+ void determineAudioModeChange();
status_t instantiateDecoder(bool audio, sp<DecoderBase> *decoder);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 99a2a84..dcc28c4 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -87,6 +87,22 @@
return mStats;
}
+status_t NuPlayer::Decoder::setVideoSurface(const sp<Surface> &surface) {
+ if (surface == NULL || ADebug::isExperimentEnabled("legacy-setsurface")) {
+ return BAD_VALUE;
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);
+
+ msg->setObject("surface", surface);
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+ return err;
+}
+
void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
@@ -169,6 +185,46 @@
break;
}
+ case kWhatSetVideoSurface:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("surface", &obj));
+ sp<Surface> surface = static_cast<Surface *>(obj.get()); // non-null
+ int32_t err = INVALID_OPERATION;
+ // NOTE: in practice mSurface is always non-null, but checking here for completeness
+ if (mCodec != NULL && mSurface != NULL) {
+ // TODO: once AwesomePlayer is removed, remove this automatic connecting
+ // to the surface by MediaPlayerService.
+ //
+ // at this point MediaPlayerService::client has already connected to the
+ // surface, which MediaCodec does not expect
+ err = native_window_api_disconnect(surface.get(), NATIVE_WINDOW_API_MEDIA);
+ if (err == OK) {
+ err = mCodec->setSurface(surface);
+ ALOGI_IF(err, "codec setSurface returned: %d", err);
+ if (err == OK) {
+ // reconnect to the old surface as MPS::Client will expect to
+ // be able to disconnect from it.
+ (void)native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_MEDIA);
+ mSurface = surface;
+ }
+ }
+ if (err != OK) {
+ // reconnect to the new surface on error as MPS::Client will expect to
+ // be able to disconnect from it.
+ (void)native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA);
+ }
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
default:
DecoderBase::onMessageReceived(msg);
break;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index ceccb7a..ed0be62 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -32,6 +32,9 @@
virtual sp<AMessage> getStats() const;
+ // sets the output surface of video decoders.
+ virtual status_t setVideoSurface(const sp<Surface> &surface);
+
protected:
virtual ~Decoder();
@@ -50,6 +53,7 @@
enum {
kWhatCodecNotify = 'cdcN',
kWhatRenderBuffer = 'rndr',
+ kWhatSetVideoSurface = 'sSur'
};
sp<Surface> mSurface;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
index 8f030f0..b0dc01d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
@@ -27,6 +27,7 @@
struct ABuffer;
struct MediaCodec;
class MediaBuffer;
+class Surface;
struct NuPlayer::DecoderBase : public AHandler {
DecoderBase(const sp<AMessage> ¬ify);
@@ -36,6 +37,7 @@
void setParameters(const sp<AMessage> ¶ms);
void setRenderer(const sp<Renderer> &renderer);
+ virtual status_t setVideoSurface(const sp<Surface> &) { return INVALID_OPERATION; }
status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
void signalFlush();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 7e55aac..767417b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -29,8 +29,7 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
-
-#include <VideoFrameScheduler.h>
+#include <media/stagefright/VideoFrameScheduler.h>
#include <inttypes.h>
@@ -266,6 +265,7 @@
Mutex::Autolock autoLock(mLock);
if (audio) {
mNotifyCompleteAudio |= notifyComplete;
+ clearAudioFirstAnchorTime_l();
++mAudioQueueGeneration;
++mAudioDrainGeneration;
} else {
@@ -275,7 +275,6 @@
}
clearAnchorTime_l();
- clearAudioFirstAnchorTime_l();
mVideoLateByUs = 0;
mSyncQueues = false;
}
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 5210fc8..58ff113 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -138,7 +138,9 @@
}
void NuPlayer::RTSPSource::resume() {
- mHandler->resume();
+ if (mHandler != NULL) {
+ mHandler->resume();
+ }
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
@@ -295,13 +297,19 @@
sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);
msg->setInt32("generation", ++mSeekGeneration);
msg->setInt64("timeUs", seekTimeUs);
- msg->post(200000ll);
- return OK;
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
}
void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) {
if (mState != CONNECTED) {
+ finishSeek(INVALID_OPERATION);
return;
}
@@ -320,9 +328,11 @@
} else if (msg->what() == kWhatPerformSeek) {
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
+ CHECK(msg->senderAwaitsResponse(&mSeekReplyID));
if (generation != mSeekGeneration) {
// obsolete.
+ finishSeek(OK);
return;
}
@@ -368,6 +378,37 @@
case MyHandler::kWhatSeekDone:
{
mState = CONNECTED;
+ if (mSeekReplyID != NULL) {
+ // Unblock seekTo here in case we attempted to seek in a live stream
+ finishSeek(OK);
+ }
+ break;
+ }
+
+ case MyHandler::kWhatSeekPaused:
+ {
+ sp<AnotherPacketSource> source = getSource(true /* audio */);
+ if (source != NULL) {
+ source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+ /* extra */ NULL,
+ /* discard */ true);
+ }
+ source = getSource(false /* video */);
+ if (source != NULL) {
+ source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE,
+ /* extra */ NULL,
+ /* discard */ true);
+ }
+
+ status_t err = OK;
+ msg->findInt32("err", &err);
+ finishSeek(err);
+
+ if (err == OK) {
+ int64_t timeUs;
+ CHECK(msg->findInt64("time", &timeUs));
+ mHandler->continueSeekAfterPause(timeUs);
+ }
break;
}
@@ -700,5 +741,12 @@
return true;
}
+void NuPlayer::RTSPSource::finishSeek(status_t err) {
+ CHECK(mSeekReplyID != NULL);
+ sp<AMessage> seekReply = new AMessage;
+ seekReply->setInt32("err", err);
+ seekReply->postReply(mSeekReplyID);
+ mSeekReplyID = NULL;
+}
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 5f2cf33..6438a1e 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -116,6 +116,8 @@
int64_t mEOSTimeoutAudio;
int64_t mEOSTimeoutVideo;
+ sp<AReplyToken> mSeekReplyID;
+
sp<AnotherPacketSource> getSource(bool audio);
void onConnected();
@@ -131,6 +133,7 @@
void setError(status_t err);
void startBufferingIfNecessary();
bool stopBufferingIfNecessary();
+ void finishSeek(status_t err);
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
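The seekTo() change above replaces a delayed fire-and-forget post with the blocking request/reply pattern already used elsewhere in the foundation classes; a condensed sketch of that pattern (the free functions and their names are illustrative only):

    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    // Caller side: post a message and block until the handler replies with "err".
    status_t postAndWait(const sp<AMessage> &msg) {
        sp<AMessage> response;
        status_t err = msg->postAndAwaitResponse(&response);
        if (err == OK && response != NULL) {
            CHECK(response->findInt32("err", &err));
        }
        return err;
    }

    // Handler side: capture the reply token and answer it exactly once, which is
    // what RTSPSource::finishSeek() now does for kWhatPerformSeek.
    void replyWith(const sp<AMessage> &msg, status_t result) {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));
        sp<AMessage> response = new AMessage;
        response->setInt32("err", result);
        response->postReply(replyID);
    }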
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9206b5c..cebd577 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -493,6 +493,7 @@
ACodec::ACodec()
: mQuirks(0),
mNode(0),
+ mNativeWindowUsageBits(0),
mSentFormat(false),
mIsVideo(false),
mIsEncoder(false),
@@ -642,7 +643,7 @@
return OK;
}
- // allow keeping unset surface
+ // cannot switch from bytebuffers to surface
if (mNativeWindow == NULL) {
ALOGW("component was not configured with a surface");
return INVALID_OPERATION;
@@ -661,11 +662,20 @@
return INVALID_OPERATION;
}
- status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow);
+ int usageBits = 0;
+ status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
if (err != OK) {
return err;
}
+ int ignoredFlags = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER
+ | GRALLOC_USAGE_EXTERNAL_DISP);
+ // The new output surface is not allowed to add usage flags, except for the ignored ones.
+ if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
+ ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
+ return BAD_VALUE;
+ }
+
// get min undequeued count. We cannot switch to a surface that has a higher
// undequeued count than we allocated.
int minUndequeuedBuffers = 0;
@@ -747,6 +757,7 @@
}
mNativeWindow = nativeWindow;
+ mNativeWindowUsageBits = usageBits;
return OK;
}
@@ -868,7 +879,8 @@
return OK;
}
-status_t ACodec::setupNativeWindowSizeFormatAndUsage(ANativeWindow *nativeWindow /* nonnull */) {
+status_t ACodec::setupNativeWindowSizeFormatAndUsage(
+ ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = kPortIndexOutput;
@@ -894,6 +906,7 @@
}
usage |= GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP;
+ *finalUsage = usage;
ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
return setNativeWindowSizeFormatAndUsage(
@@ -916,9 +929,10 @@
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
- err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get());
+ err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
}
if (err != OK) {
+ mNativeWindowUsageBits = 0;
return err;
}
@@ -1937,6 +1951,7 @@
// to SW renderer
ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
mNativeWindow.clear();
+ mNativeWindowUsageBits = 0;
haveNativeWindow = false;
usingSwRenderer = true;
if (storingMetadataInDecodedBuffers()) {
@@ -4542,9 +4557,7 @@
sp<RefBase> obj;
CHECK(msg->findObject("surface", &obj));
- status_t err =
- ADebug::isExperimentEnabled("legacy-setsurface") ? BAD_VALUE :
- mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
+ status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
@@ -5341,6 +5354,7 @@
}
mCodec->mNativeWindow.clear();
+ mCodec->mNativeWindowUsageBits = 0;
mCodec->mNode = 0;
mCodec->mOMX.clear();
mCodec->mQuirks = 0;
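The usage-bit check added to handleSetSurface() can be read as a simple mask test; a sketch with hypothetical flag combinations (the gralloc flag names are real, the scenario is made up):

    #include <hardware/gralloc.h>

    // Hypothetical scenario: the original surface was allocated for
    // HW_TEXTURE | EXTERNAL_DISP and the replacement also asks for
    // HW_VIDEO_ENCODER, which the codec's buffers were never allocated for.
    bool canSwitchSurface(int currentUsageBits, int newUsageBits) {
        const int ignoredFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER
                | GRALLOC_USAGE_EXTERNAL_DISP;
        // The replacement surface may drop flags or add only the ignored ones;
        // any other new bit causes handleSetSurface() to return BAD_VALUE.
        return (newUsageBits & ~(currentUsageBits | ignoredFlags)) == 0;
    }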
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 69128bd..b86c749 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -64,6 +64,7 @@
TimedEventQueue.cpp \
Utils.cpp \
VBRISeeker.cpp \
+ VideoFrameScheduler.cpp \
WAVExtractor.cpp \
WVMExtractor.cpp \
XINGSeeker.cpp \
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index b696746..26b07d4 100755
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -4034,6 +4034,10 @@
CHECK(mBuffer == NULL);
return err;
}
+ if (size > mBuffer->size()) {
+ ALOGE("buffer too small: %zu > %zu", size, mBuffer->size());
+ return ERROR_BUFFER_TOO_SMALL;
+ }
}
if ((!mIsAVC && !mIsHEVC) || mWantsNALFragments) {
@@ -4294,6 +4298,10 @@
ALOGV("acquire_buffer returned %d", err);
return err;
}
+ if (size > mBuffer->size()) {
+ ALOGE("buffer too small: %zu > %zu", size, mBuffer->size());
+ return ERROR_BUFFER_TOO_SMALL;
+ }
}
const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 69f44ed..fb32d3a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -2528,6 +2528,7 @@
err = native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA);
if (err == BAD_VALUE) {
ALOGI("native window already connected. Assuming no change of surface");
+ return err;
} else if (err == OK) {
// Require a fresh set of buffers after each connect by using a unique generation
// number. Rely on the fact that max supported process id by Linux is 2^22.
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 7ea5cbd..5edc04c 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -216,6 +216,12 @@
String8(kPolicySupportsMultipleSecureCodecs),
String8(value.c_str())));
}
+ if (mGlobalSettings->findString(kPolicySupportsSecureWithNonSecureCodec, &value)) {
+ policies.push_back(
+ MediaResourcePolicy(
+ String8(kPolicySupportsSecureWithNonSecureCodec),
+ String8(value.c_str())));
+ }
if (policies.size() > 0) {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.resource_manager"));
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
index 52077a7..0df3ec9 100644
--- a/media/libstagefright/MediaSync.cpp
+++ b/media/libstagefright/MediaSync.cpp
@@ -25,6 +25,7 @@
#include <media/AudioTrack.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/VideoFrameScheduler.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -50,6 +51,8 @@
mReleaseCondition(),
mNumOutstandingBuffers(0),
mUsageFlagsFromOutput(0),
+ mMaxAcquiredBufferCount(1),
+ mReturnPendingInputFrame(false),
mNativeSampleRateInHz(0),
mNumFramesWritten(0),
mHasAudio(false),
@@ -121,6 +124,11 @@
ALOGE("setSurface: failed to connect (%d)", status);
return status;
}
+
+ if (mFrameScheduler == NULL) {
+ mFrameScheduler = new VideoFrameScheduler();
+ mFrameScheduler->init();
+ }
}
if (mOutput != NULL) {
@@ -209,6 +217,12 @@
bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
*outBufferProducer = bufferProducer;
mInput = bufferConsumer;
+
+ // set undequeued buffer count
+ int minUndequeuedBuffers;
+ mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
+ mMaxAcquiredBufferCount = minUndequeuedBuffers;
+ bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
}
return status;
}
@@ -232,6 +246,7 @@
mNextBufferItemMediaUs = -1;
}
mPlaybackRate = rate;
+ // TODO: update frame scheduler with this info
mMediaClock->setPlaybackRate(rate);
onDrainVideo_l();
}
@@ -325,13 +340,44 @@
mInput->setConsumerName(String8(name.c_str()));
}
+void MediaSync::flush() {
+ Mutex::Autolock lock(mMutex);
+ if (mFrameScheduler != NULL) {
+ mFrameScheduler->restart();
+ }
+ while (!mBufferItems.empty()) {
+ BufferItem *bufferItem = &*mBufferItems.begin();
+ returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence);
+ mBufferItems.erase(mBufferItems.begin());
+ }
+ mNextBufferItemMediaUs = -1;
+ mNumFramesWritten = 0;
+ mReturnPendingInputFrame = true;
+ mReleaseCondition.signal();
+ mMediaClock->clearAnchor();
+}
+
status_t MediaSync::setVideoFrameRateHint(float rate) {
- // ignored until we add the FrameScheduler
- return rate >= 0.f ? OK : BAD_VALUE;
+ Mutex::Autolock lock(mMutex);
+ if (rate < 0.f) {
+ return BAD_VALUE;
+ }
+ if (mFrameScheduler != NULL) {
+ mFrameScheduler->init(rate);
+ }
+ return OK;
}
float MediaSync::getVideoFrameRate() {
- // we don't know the frame rate
+ Mutex::Autolock lock(mMutex);
+ if (mFrameScheduler != NULL) {
+ float fps = mFrameScheduler->getFrameRate();
+ if (fps > 0.f) {
+ return fps;
+ }
+ }
+
+ // we don't have or know the frame rate
return -1.f;
}
@@ -470,7 +516,7 @@
CHECK_EQ(res, (status_t)OK);
numFramesPlayedAt = nowUs;
numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
- //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
}
//can't be negative until 12.4 hrs, test.
@@ -510,18 +556,30 @@
int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
- if (itemRealUs <= nowUs) {
+ // adjust video frame PTS based on vsync
+ itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000;
+ int64_t oneVsyncUs = (mFrameScheduler->getVsyncPeriod() / 1000);
+ int64_t twoVsyncsUs = oneVsyncUs * 2;
+
+ // post 2 display refreshes before rendering is due
+ if (itemRealUs <= nowUs + twoVsyncsUs) {
+ ALOGV("adjusting PTS from %lld to %lld",
+ (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs);
+ bufferItem->mTimestamp = itemRealUs * 1000;
+ bufferItem->mIsAutoTimestamp = false;
+
if (mHasAudio) {
if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
- renderOneBufferItem_l(*bufferItem);
+ renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
} else {
// too late.
returnBufferToInput_l(
bufferItem->mGraphicBuffer, bufferItem->mFence);
+ mFrameScheduler->restart();
}
} else {
// always render video buffer in video-only mode.
- renderOneBufferItem_l(*bufferItem);
+ renderOneBufferItem_l(*bufferItem, nowUs + oneVsyncUs - itemRealUs);
// smooth out videos >= 10fps
mMediaClock->updateAnchor(
@@ -534,7 +592,7 @@
if (mNextBufferItemMediaUs == -1
|| mNextBufferItemMediaUs > itemMediaUs) {
sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
- msg->post(itemRealUs - nowUs);
+ msg->post(itemRealUs - nowUs - twoVsyncsUs);
mNextBufferItemMediaUs = itemMediaUs;
}
break;
@@ -545,10 +603,18 @@
void MediaSync::onFrameAvailableFromInput() {
Mutex::Autolock lock(mMutex);
+ const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds
+
+ mReturnPendingInputFrame = false;
+
// If there are too many outstanding buffers, wait until a buffer is
// released back to the input in onBufferReleased.
- while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) {
- mReleaseCondition.wait(mMutex);
+ // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers
+ while (mNumOutstandingBuffers > mMaxAcquiredBufferCount
+ && !mIsAbandoned && !mReturnPendingInputFrame) {
+ if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) {
+ ALOGI("still waiting to release a buffer before acquire");
+ }
// If the sync is abandoned while we are waiting, the release
// condition variable will be broadcast, and we should just return
@@ -582,12 +648,21 @@
if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
// Something is wrong since this buffer should be at our hands, bail.
+ ALOGE("received buffer multiple times from input");
mInput->consumerDisconnect();
onAbandoned_l(true /* isInput */);
return;
}
mBuffersFromInput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
+ // If flush happened while waiting for a buffer to be released, simply return it
+ // TRICKY: do it here after it is detached so that we don't have to cache mGraphicBuffer.
+ if (mReturnPendingInputFrame) {
+ mReturnPendingInputFrame = false;
+ returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
+ return;
+ }
+
mBufferItems.push_back(bufferItem);
if (mBufferItems.size() == 1) {
@@ -595,7 +670,7 @@
}
}
-void MediaSync::renderOneBufferItem_l( const BufferItem &bufferItem) {
+void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem, int64_t checkInUs) {
IGraphicBufferProducer::QueueBufferInput queueInput(
bufferItem.mTimestamp,
bufferItem.mIsAutoTimestamp,
@@ -635,6 +710,12 @@
mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);
ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
+
+ // If we have already queued more than one buffer, check for any free buffers in case
+ // one of them was dropped - as BQ does not signal onBufferReleased in that case.
+ if (mBuffersSentToOutput.size() > 1) {
+ (new AMessage(kWhatCheckFrameAvailable, this))->post(checkInUs);
+ }
}
void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
@@ -646,32 +727,38 @@
sp<GraphicBuffer> buffer;
sp<Fence> fence;
- status_t status = mOutput->detachNextBuffer(&buffer, &fence);
- ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
+ status_t status;
+ // NOTE: This is a workaround for a BufferQueue bug where onBufferReleased is
+ // called only for released buffers, but not for buffers that were dropped during
+ // acquire. Dropped buffers can still be detached as they are on the free list.
+ // TODO: remove once the released callback is also delivered for dropped buffers
+ while ((status = mOutput->detachNextBuffer(&buffer, &fence)) != NO_MEMORY) {
+ ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
- if (status == NO_INIT) {
- // If the output has been abandoned, we can't do anything else,
- // since buffer is invalid.
- onAbandoned_l(false /* isInput */);
- return;
+ if (status == NO_INIT) {
+ // If the output has been abandoned, we can't do anything else,
+ // since buffer is invalid.
+ onAbandoned_l(false /* isInput */);
+ return;
+ }
+
+ ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
+
+ // If we've been abandoned, we can't return the buffer to the input, so just
+ // move on.
+ if (mIsAbandoned) {
+ return;
+ }
+
+ ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
+ if (ix < 0) {
+ // The buffer is unknown, maybe leftover, ignore.
+ return;
+ }
+ mBuffersSentToOutput.removeItemsAt(ix);
+
+ returnBufferToInput_l(buffer, fence);
}
-
- ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
-
- // If we've been abandoned, we can't return the buffer to the input, so just
- // move on.
- if (mIsAbandoned) {
- return;
- }
-
- ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
- if (ix < 0) {
- // The buffer is unknown, maybe leftover, ignore.
- return;
- }
- mBuffersSentToOutput.removeItemsAt(ix);
-
- returnBufferToInput_l(buffer, fence);
}
void MediaSync::returnBufferToInput_l(
@@ -679,6 +766,7 @@
ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId());
if (ix < 0) {
// The buffer is unknown, something is wrong, bail.
+ ALOGE("output returned unknown buffer");
mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
onAbandoned_l(false /* isInput */);
return;
@@ -741,6 +829,12 @@
break;
}
+ case kWhatCheckFrameAvailable:
+ {
+ onBufferReleasedByOutput(mOutput);
+ break;
+ }
+
default:
TRESPASS();
break;
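A worked example of the drain timing now used in onDrainVideo_l(), with made-up numbers (a 60 Hz display, so one vsync is roughly 16,667 us):

    #include <stdint.h>

    // Illustrative arithmetic only.
    void drainTimingExample() {
        const int64_t nowUs = 1000000;        // current time
        const int64_t itemRealUs = 1050000;   // vsync-adjusted render time
        const int64_t oneVsyncUs = 16667;     // ~60 Hz display
        const int64_t twoVsyncsUs = 2 * oneVsyncUs;

        if (itemRealUs <= nowUs + twoVsyncsUs) {
            // 1,050,000 > 1,033,334, so this frame is not rendered yet.
        } else {
            // It is re-posted to fire two vsyncs before it is due:
            // 1,050,000 - 1,000,000 - 33,334 = 16,666 us from now.
            int64_t delayUs = itemRealUs - nowUs - twoVsyncsUs;
            (void)delayUs;
        }
    }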
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp
similarity index 96%
rename from media/libmediaplayerservice/VideoFrameScheduler.cpp
rename to media/libstagefright/VideoFrameScheduler.cpp
index ce5f5fe..5fe9bf9 100644
--- a/media/libmediaplayerservice/VideoFrameScheduler.cpp
+++ b/media/libstagefright/VideoFrameScheduler.cpp
@@ -28,8 +28,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
-
-#include "VideoFrameScheduler.h"
+#include <media/stagefright/VideoFrameScheduler.h>
namespace android {
@@ -56,7 +55,7 @@
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
static const size_t kPrecision = 12;
-static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4; // 25%
static const int64_t kReFitThresholdDiv = 100; // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec
@@ -258,7 +257,8 @@
mPhase = firstTime;
}
}
- ALOGV("priming[%zu] phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod);
+ ALOGV("priming[%zu] phase:%lld period:%lld",
+ numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
@@ -316,6 +316,10 @@
return mPeriod;
}
+nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
+ return mPrimed ? mPeriod : 0;
+}
+
/* ======================================================================= */
/* Frame Scheduler */
/* ======================================================================= */
@@ -382,6 +386,14 @@
return kDefaultVsyncPeriod;
}
+float VideoFrameScheduler::getFrameRate() {
+ nsecs_t videoPeriod = mPll.getPeriod();
+ if (videoPeriod > 0) {
+ return 1e9 / videoPeriod;
+ }
+ return 0.f;
+}
+
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
nsecs_t origRenderTime = renderTime;
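getFrameRate() simply inverts the primed PLL period; a tiny sketch with an assumed period value:

    #include <stdint.h>

    // A primed period of 33,366,667 ns gives 1e9 / 33,366,667, about 29.97 fps;
    // an unprimed PLL reports a period of 0, so getFrameRate() returns 0.f.
    float frameRateFromPeriod(int64_t periodNs) {
        return periodNs > 0 ? (float)(1e9 / periodNs) : 0.f;
    }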
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 5c05a0e..1db350f 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -82,7 +82,10 @@
initPorts(
kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
- CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+status_t SoftHEVC::init() {
+ return initDecoder();
}
SoftHEVC::~SoftHEVC() {
@@ -766,5 +769,10 @@
android::SoftOMXComponent *createSoftOMXComponent(const char *name,
const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
OMX_COMPONENTTYPE **component) {
- return new android::SoftHEVC(name, callbacks, appData, component);
+ android::SoftHEVC *codec = new android::SoftHEVC(name, callbacks, appData, component);
+ if (codec->init() != android::OK) {
+ android::sp<android::SoftOMXComponent> release = codec;
+ return NULL;
+ }
+ return codec;
}
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
index a91f528..c6344cf 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -56,6 +56,8 @@
SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData, OMX_COMPONENTTYPE **component);
+ status_t init();
+
protected:
virtual ~SoftHEVC();
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp
index 0d1cea4..24fa561 100644
--- a/media/libstagefright/foundation/ADebug.cpp
+++ b/media/libstagefright/foundation/ADebug.cpp
@@ -32,11 +32,10 @@
namespace android {
//static
-ADebug::Level ADebug::GetDebugLevelFromString(
- const char *name, const char *value, ADebug::Level def) {
+long ADebug::GetLevelFromSettingsString(
+ const char *name, const char *value, long def) {
// split on ,
const char *next = value, *current;
- const unsigned long maxLevel = (unsigned long)kDebugMax;
while (next != NULL) {
current = next;
next = strchr(current, ',');
@@ -52,8 +51,8 @@
// get level
char *end;
- errno = 0; // strtoul does not clear errno, but it can be set for any return value
- unsigned long level = strtoul(current, &end, 10);
+ errno = 0; // strtol does not clear errno, but it can be set for any return value
+ long level = strtol(current, &end, 10);
while (isspace(*end)) {
++end;
}
@@ -77,8 +76,18 @@
}
}
- // update debug level
- def = (Level)min(level, maxLevel);
+ // update value
+ def = level;
+ }
+ return def;
+}
+
+//static
+long ADebug::GetLevelFromProperty(
+ const char *name, const char *propertyName, long def) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get(propertyName, value, NULL)) {
+ def = GetLevelFromSettingsString(name, value, def);
}
return def;
}
@@ -86,11 +95,8 @@
//static
ADebug::Level ADebug::GetDebugLevelFromProperty(
const char *name, const char *propertyName, ADebug::Level def) {
- char value[PROPERTY_VALUE_MAX];
- if (property_get(propertyName, value, NULL)) {
- return GetDebugLevelFromString(name, value, def);
- }
- return def;
+ long level = GetLevelFromProperty(name, propertyName, (long)def);
+ return (Level)min(max(level, (long)kDebugNone), (long)kDebugMax);
}
//static
@@ -118,6 +124,15 @@
bool ADebug::getExperimentFlag(
bool allow, const char *name, uint64_t modulo,
uint64_t limit, uint64_t plus, uint64_t timeDivisor) {
+ // see if this experiment should be disabled/enabled based on properties.
+ // default to 2 to allow 0/1 specification
+ const int undefined = 2;
+ long level = GetLevelFromProperty(name, "debug.stagefright.experiments", undefined);
+ if (level != undefined) {
+ ALOGI("experiment '%s': %s from property", name, level ? "ENABLED" : "disabled");
+ return level != 0;
+ }
+
static volatile int32_t haveSerial = 0;
static uint64_t serialNum;
if (!android_atomic_acquire_load(&haveSerial)) {
@@ -138,11 +153,10 @@
num = num * 256 + c;
}
}
- ALOGI("got serial");
serialNum = num;
android_atomic_release_store(1, &haveSerial);
}
- ALOGI("serial: %llu, time: %llu", (long long)serialNum, (long long)time(NULL));
+ ALOGD("serial: %llu, time: %lld", (long long unsigned)serialNum, (long long)time(NULL));
// MINOR: use modulo for counter and time, so that their sum does not
// roll over, and mess up the correlation between related experiments.
// e.g. keep (a mod 2N) = 0 impl (a mod N) = 0
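The property hook added to getExperimentFlag() reuses the settings-string syntax, with 2 standing in for "undefined"; a sketch of how an override check could be written on top of it (the helper name and example property values are assumptions):

    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    // e.g. "0" disables every experiment; "2,1:legacy-setsurface" leaves all
    // experiments at their default and enables only legacy-setsurface.
    bool experimentOverriddenOn(const char *experimentName, bool defaultValue) {
        const long undefined = 2;  // outside the 0/1 range used by experiments
        long level = ADebug::GetLevelFromProperty(
                experimentName, "debug.stagefright.experiments", undefined);
        return (level != undefined) ? (level != 0) : defaultValue;
    }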
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 27509cb..2fc5135 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -58,15 +58,21 @@
BandwidthEstimator();
void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
- bool estimateBandwidth(int32_t *bandwidth, bool *isStable = NULL);
+ bool estimateBandwidth(
+ int32_t *bandwidth,
+ bool *isStable = NULL,
+ int32_t *shortTermBps = NULL);
private:
// Bandwidth estimation parameters
+ static const int32_t kShortTermBandwidthItems = 3;
static const int32_t kMinBandwidthHistoryItems = 20;
static const int64_t kMinBandwidthHistoryWindowUs = 5000000ll; // 5 sec
static const int64_t kMaxBandwidthHistoryWindowUs = 30000000ll; // 30 sec
+ static const int64_t kMaxBandwidthHistoryAgeUs = 60000000ll; // 60 sec
struct BandwidthEntry {
+ int64_t mTimestampUs;
int64_t mDelayUs;
size_t mNumBytes;
};
@@ -74,6 +80,7 @@
Mutex mLock;
List<BandwidthEntry> mBandwidthHistory;
List<int32_t> mPrevEstimates;
+ int32_t mShortTermEstimate;
bool mHasNewSample;
bool mIsStable;
int64_t mTotalTransferTimeUs;
@@ -83,6 +90,7 @@
};
LiveSession::BandwidthEstimator::BandwidthEstimator() :
+ mShortTermEstimate(0),
mHasNewSample(false),
mIsStable(true),
mTotalTransferTimeUs(0),
@@ -93,7 +101,9 @@
size_t numBytes, int64_t delayUs) {
AutoMutex autoLock(mLock);
+ int64_t nowUs = ALooper::GetNowUs();
BandwidthEntry entry;
+ entry.mTimestampUs = nowUs;
entry.mDelayUs = delayUs;
entry.mNumBytes = numBytes;
mTotalTransferTimeUs += delayUs;
@@ -115,7 +125,10 @@
// and total transfer time at least kMaxBandwidthHistoryWindowUs.
while (mBandwidthHistory.size() > kMinBandwidthHistoryItems) {
List<BandwidthEntry>::iterator it = mBandwidthHistory.begin();
- if (mTotalTransferTimeUs - it->mDelayUs < bandwidthHistoryWindowUs) {
+ // remove the sample if either its absolute age exceeds kMaxBandwidthHistoryAgeUs
+ // or the accumulated transfer time exceeds the history window
+ if (nowUs - it->mTimestampUs < kMaxBandwidthHistoryAgeUs &&
+ mTotalTransferTimeUs - it->mDelayUs < bandwidthHistoryWindowUs) {
break;
}
mTotalTransferTimeUs -= it->mDelayUs;
@@ -125,7 +138,7 @@
}
bool LiveSession::BandwidthEstimator::estimateBandwidth(
- int32_t *bandwidthBps, bool *isStable) {
+ int32_t *bandwidthBps, bool *isStable, int32_t *shortTermBps) {
AutoMutex autoLock(mLock);
if (mBandwidthHistory.size() < 2) {
@@ -137,6 +150,9 @@
if (isStable) {
*isStable = mIsStable;
}
+ if (shortTermBps) {
+ *shortTermBps = mShortTermEstimate;
+ }
return true;
}
@@ -147,6 +163,21 @@
}
mHasNewSample = false;
+ int64_t totalTimeUs = 0;
+ size_t totalBytes = 0;
+ if (mBandwidthHistory.size() >= kShortTermBandwidthItems) {
+ List<BandwidthEntry>::iterator it = --mBandwidthHistory.end();
+ for (size_t i = 0; i < kShortTermBandwidthItems; i++, it--) {
+ totalTimeUs += it->mDelayUs;
+ totalBytes += it->mNumBytes;
+ }
+ }
+ mShortTermEstimate = totalTimeUs > 0 ?
+ (totalBytes * 8E6 / totalTimeUs) : *bandwidthBps;
+ if (shortTermBps) {
+ *shortTermBps = mShortTermEstimate;
+ }
+
int32_t minEstimate = -1, maxEstimate = -1;
List<int32_t>::iterator it;
for (it = mPrevEstimates.begin(); it != mPrevEstimates.end(); it++) {
@@ -158,10 +189,14 @@
maxEstimate = estimate;
}
}
- mIsStable = (maxEstimate <= minEstimate * 4 / 3);
+ // consider it stable if long-term average is not jumping a lot
+ // and short-term average is not much lower than long-term average
+ mIsStable = (maxEstimate <= minEstimate * 4 / 3)
+ && mShortTermEstimate > minEstimate * 7 / 10;
if (isStable) {
- *isStable = mIsStable;
+ *isStable = mIsStable;
}
+
#if 0
{
char dumpStr[1024] = {0};
@@ -251,6 +286,7 @@
mCurBandwidthIndex(-1),
mOrigBandwidthIndex(-1),
mLastBandwidthBps(-1ll),
+ mLastBandwidthStable(false),
mBandwidthEstimator(new BandwidthEstimator()),
mMaxWidth(720),
mMaxHeight(480),
@@ -713,6 +749,20 @@
}
}
+ // remember the failure index (as mCurBandwidthIndex will be restored
+ // after cancelBandwidthSwitch()), and record last fail time
+ size_t failureIndex = mCurBandwidthIndex;
+ mBandwidthItems.editItemAt(
+ failureIndex).mLastFailureUs = ALooper::GetNowUs();
+
+ if (mSwitchInProgress) {
+ // if error happened when we switch to a variant, try fallback
+ // to other variant to save the session
+ if (tryBandwidthFallback()) {
+ break;
+ }
+ }
+
if (mInPreparationPhase) {
postPrepared(err);
}
@@ -887,6 +937,13 @@
}
// static
+bool LiveSession::isBandwidthValid(const BandwidthItem &item) {
+ static const int64_t kBlacklistWindowUs = 300 * 1000000ll;
+ return item.mLastFailureUs < 0
+ || ALooper::GetNowUs() - item.mLastFailureUs > kBlacklistWindowUs;
+}
+
+// static
int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {
if (a->mBandwidth < b->mBandwidth) {
return -1;
@@ -986,6 +1043,7 @@
BandwidthItem item;
item.mPlaylistIndex = i;
+ item.mLastFailureUs = -1ll;
sp<AMessage> meta;
AString uri;
@@ -1223,6 +1281,13 @@
X/T < bw1 / (bw1 + bw0 - bw)
*/
+ // abort old bandwidth immediately if bandwidth is fluctuating a lot.
+ // our estimate could be far off, and fetching old bandwidth could
+ // take too long.
+ if (!mLastBandwidthStable) {
+ return 0.0f;
+ }
+
// Taking the measured current bandwidth at 50% face value only,
// as our bandwidth estimation is a lagging indicator. Being
// conservative on this, we prefer switching to lower bandwidth
@@ -1250,6 +1315,16 @@
mBandwidthEstimator->addBandwidthMeasurement(numBytes, delayUs);
}
+ssize_t LiveSession::getLowestValidBandwidthIndex() const {
+ for (size_t index = 0; index < mBandwidthItems.size(); index++) {
+ if (isBandwidthValid(mBandwidthItems[index])) {
+ return index;
+ }
+ }
+ // if playlists are all blacklisted, return 0 and hope it's alive
+ return 0;
+}
+
size_t LiveSession::getBandwidthIndex(int32_t bandwidthBps) {
if (mBandwidthItems.size() < 2) {
// shouldn't be here if we only have 1 bandwidth, check
@@ -1284,14 +1359,18 @@
}
}
- // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+ // Pick the highest bandwidth stream that's not currently blacklisted
+ // below or equal to estimated bandwidth.
index = mBandwidthItems.size() - 1;
- while (index > 0) {
+ ssize_t lowestBandwidth = getLowestValidBandwidthIndex();
+ while (index > lowestBandwidth) {
// be conservative (70%) to avoid overestimating and immediately
// switching down again.
size_t adjustedBandwidthBps = bandwidthBps * 7 / 10;
- if (mBandwidthItems.itemAt(index).mBandwidth <= adjustedBandwidthBps) {
+ const BandwidthItem &item = mBandwidthItems[index];
+ if (item.mBandwidth <= adjustedBandwidthBps
+ && isBandwidthValid(item)) {
break;
}
--index;
@@ -2172,21 +2251,57 @@
notify->post();
}
+bool LiveSession::tryBandwidthFallback() {
+ if (mInPreparationPhase || mReconfigurationInProgress) {
+ // Don't try fallback during prepare or reconfig.
+ // If error happens there, it's likely unrecoverable.
+ return false;
+ }
+ if (mCurBandwidthIndex > mOrigBandwidthIndex) {
+ // if we're switching up, simply cancel and resume old variant
+ cancelBandwidthSwitch(true /* resume */);
+ return true;
+ } else {
+ // if we're switching down, we're likely about to underflow (if
+ // not already underflowing). try the lowest viable bandwidth if
+ // not on that variant already.
+ ssize_t lowestValid = getLowestValidBandwidthIndex();
+ if (mCurBandwidthIndex > lowestValid) {
+ cancelBandwidthSwitch();
+ changeConfiguration(-1ll, lowestValid);
+ return true;
+ }
+ }
+ // return false if we couldn't find any fallback
+ return false;
+}
+
/*
* returns true if a bandwidth switch is actually needed (and started),
* returns false otherwise
*/
bool LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) {
// no need to check bandwidth if we only have 1 bandwidth settings
- if (mSwitchInProgress || mBandwidthItems.size() < 2) {
+ if (mBandwidthItems.size() < 2) {
return false;
}
- int32_t bandwidthBps;
+ if (mSwitchInProgress) {
+ if (mBuffering) {
+ tryBandwidthFallback();
+ }
+ return false;
+ }
+
+ int32_t bandwidthBps, shortTermBps;
bool isStable;
- if (mBandwidthEstimator->estimateBandwidth(&bandwidthBps, &isStable)) {
- ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+ if (mBandwidthEstimator->estimateBandwidth(
+ &bandwidthBps, &isStable, &shortTermBps)) {
+ ALOGV("bandwidth estimated at %.2f kbps, "
+ "stable %d, shortTermBps %.2f kbps",
+ bandwidthBps / 1024.0f, isStable, shortTermBps / 1024.0f);
mLastBandwidthBps = bandwidthBps;
+ mLastBandwidthStable = isStable;
} else {
ALOGV("no bandwidth estimate.");
return false;
@@ -2203,9 +2318,13 @@
if (canSwitchDown || canSwitchUp) {
// bandwidth estimating has some delay, if we have to downswitch when
- // it hasn't stabilized, be very conservative on bandwidth.
+ // it hasn't stabilized, use the short-term estimate to guess the real bandwidth,
+ // since it may be dropping too fast.
+ // (note this doesn't apply to upswitch, always use longer average there)
if (!isStable && canSwitchDown) {
- bandwidthBps /= 2;
+ if (shortTermBps < bandwidthBps) {
+ bandwidthBps = shortTermBps;
+ }
}
ssize_t bandwidthIndex = getBandwidthIndex(bandwidthBps);
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 21be413..90d56d0 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -146,6 +146,7 @@
struct BandwidthItem {
size_t mPlaylistIndex;
unsigned long mBandwidth;
+ int64_t mLastFailureUs;
};
struct FetcherInfo {
@@ -199,6 +200,7 @@
ssize_t mCurBandwidthIndex;
ssize_t mOrigBandwidthIndex;
int32_t mLastBandwidthBps;
+ bool mLastBandwidthStable;
sp<BandwidthEstimator> mBandwidthEstimator;
sp<M3UParser> mPlaylist;
@@ -268,8 +270,10 @@
ssize_t currentBWIndex, ssize_t targetBWIndex) const;
void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
size_t getBandwidthIndex(int32_t bandwidthBps);
+ ssize_t getLowestValidBandwidthIndex() const;
HLSTime latestMediaSegmentStartTime() const;
+ static bool isBandwidthValid(const BandwidthItem &item);
static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
static StreamType indexToType(int idx);
static ssize_t typeToIndex(int32_t type);
@@ -287,6 +291,7 @@
sp<AMessage> &msg, int64_t delayUs, bool *needResumeUntil);
bool switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow);
+ bool tryBandwidthFallback();
void schedulePollBuffering();
void cancelPollBuffering();
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 4851528..72d832e 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -511,6 +511,13 @@
msg->post();
}
+/*
+ * pauseAsync
+ *
+ * thresholdRatio: 0.0f - pause after the current fetch block (default 47 KB)
+ * -1.0f - pause after finishing the current segment
+ * 0.0f~1.0f - pause only if the remaining portion of the current
+ * segment exceeds this ratio
+ */
void PlaylistFetcher::pauseAsync(
float thresholdRatio, bool disconnect) {
setStoppingThreshold(thresholdRatio, disconnect);
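To make the threshold semantics above concrete, here is a small standalone sketch of the stop decision they describe; the function and parameter names are illustrative and do not correspond to PlaylistFetcher internals.

// Illustrative only; not PlaylistFetcher's internal logic.
static bool shouldStopBeforeSegmentEnd(
        float thresholdRatio, float remainingFractionOfSegment) {
    if (thresholdRatio < 0.0f) {
        // -1.0f: always finish the current segment before pausing.
        return false;
    }
    if (thresholdRatio == 0.0f) {
        // 0.0f: stop right after the current fetch block.
        return true;
    }
    // (0, 1]: stop early only if enough of the segment is still unread.
    return remainingFractionOfSegment > thresholdRatio;
}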
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 0878a1b..cabde32 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -394,6 +394,20 @@
return false;
}
+size_t AnotherPacketSource::getAvailableBufferCount(status_t *finalResult) {
+ Mutex::Autolock autoLock(mLock);
+
+ *finalResult = OK;
+ if (!mEnabled) {
+ return 0;
+ }
+ if (!mBuffers.empty()) {
+ return mBuffers.size();
+ }
+ *finalResult = mEOSResult;
+ return 0;
+}
+
int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
*finalResult = mEOSResult;
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index eb9dc9b..28a0e89 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -49,6 +49,10 @@
// Returns true if we have packets that's not discontinuities
bool hasDataBufferAvailable(status_t *finalResult);
+ // Returns the number of available buffers. finalResult is always OK
+ // if this method returns a non-zero count, or the final result if it returns 0.
+ size_t getAvailableBufferCount(status_t *finalResult);
+
// Returns the difference between the last and the first queued
// presentation timestamps since the last discontinuity (if any).
int64_t getBufferedDurationUs(status_t *finalResult);
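The contract documented above (finalResult is OK whenever the count is non-zero, and carries the terminal status when the count is 0) can be modeled with a minimal stand-in class; the mock below only illustrates how callers are expected to interpret the two outputs and is not the real AnotherPacketSource.

#include <deque>

typedef int status_t;
static const status_t OK = 0;
static const status_t ERROR_END_OF_STREAM = -1011;  // illustrative value

struct MockPacketSource {
    bool enabled = true;
    status_t eosResult = OK;
    std::deque<int> buffers;   // stands in for the queued access units

    size_t getAvailableBufferCount(status_t *finalResult) {
        *finalResult = OK;
        if (!enabled) {
            return 0;
        }
        if (!buffers.empty()) {
            return buffers.size();   // finalResult stays OK
        }
        *finalResult = eosResult;    // only meaningful when returning 0
        return 0;
    }
};

// Caller-side pattern: a zero count with finalResult != OK means the stream
// has ended (or failed); a zero count with OK means "no data yet, try later".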
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
index aae3e9f..cbe9673 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
@@ -264,8 +264,19 @@
if (event.isInit()) {
for (size_t i = 0; i < mSourceImpls.size(); ++i) {
if (mSourceImpls[i].get() == event.getMediaSource().get()) {
- mSyncPoints.editItemAt(i).add(
- event.getTimeUs(), event.getOffset());
+ KeyedVector<int64_t, off64_t> *syncPoints = &mSyncPoints.editItemAt(i);
+ syncPoints->add(event.getTimeUs(), event.getOffset());
+ // We're capping the size of the sync point table at roughly 5MB per track.
+ size_t size = syncPoints->size();
+ if (size >= 327680) {
+ int64_t firstTimeUs = syncPoints->keyAt(0);
+ int64_t lastTimeUs = syncPoints->keyAt(size - 1);
+ if (event.getTimeUs() - firstTimeUs > lastTimeUs - event.getTimeUs()) {
+ syncPoints->removeItemsAt(0, 4096);
+ } else {
+ syncPoints->removeItemsAt(size - 4096, 4096);
+ }
+ }
break;
}
}
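The constants above work out as follows: each sync point stores an int64_t timestamp and an off64_t offset, so 327680 entries come to roughly 5 MB (327680 x 16 bytes = 5,242,880 bytes), and each trim drops 4096 entries from whichever end is farther from the timestamp just added. A standalone sketch of that rule, using std::map in place of KeyedVector:

#include <cstdint>
#include <iterator>
#include <map>

typedef std::map<int64_t, int64_t> SyncPoints;  // timeUs -> byte offset

static void addSyncPoint(SyncPoints *points, int64_t timeUs, int64_t offset) {
    (*points)[timeUs] = offset;

    const size_t kMaxEntries = 327680;  // ~5 MB at 16 bytes per entry
    const size_t kDropCount = 4096;
    if (points->size() < kMaxEntries) {
        return;
    }
    int64_t firstTimeUs = points->begin()->first;
    int64_t lastTimeUs = points->rbegin()->first;
    if (timeUs - firstTimeUs > lastTimeUs - timeUs) {
        // The new point is closer to the end: drop the oldest entries.
        for (size_t i = 0; i < kDropCount; ++i) {
            points->erase(points->begin());
        }
    } else {
        // The new point is closer to the start: drop the newest entries.
        for (size_t i = 0; i < kDropCount; ++i) {
            points->erase(std::prev(points->end()));
        }
    }
}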
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index e64a7a1..0d0baf3 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -98,6 +98,7 @@
enum {
kWhatConnected = 'conn',
kWhatDisconnected = 'disc',
+ kWhatSeekPaused = 'spau',
kWhatSeekDone = 'sdon',
kWhatAccessUnit = 'accU',
@@ -220,6 +221,12 @@
msg->post();
}
+ void continueSeekAfterPause(int64_t timeUs) {
+ sp<AMessage> msg = new AMessage('see1', this);
+ msg->setInt64("time", timeUs);
+ msg->post();
+ }
+
bool isSeekable() const {
return mSeekable;
}
@@ -1180,7 +1187,7 @@
mCheckPending = true;
++mCheckGeneration;
- sp<AMessage> reply = new AMessage('see1', this);
+ sp<AMessage> reply = new AMessage('see0', this);
reply->setInt64("time", timeUs);
if (mPausing) {
@@ -1203,9 +1210,26 @@
break;
}
- case 'see1':
+ case 'see0':
{
// Session is paused now.
+ status_t err = OK;
+ msg->findInt32("result", &err);
+
+ int64_t timeUs;
+ CHECK(msg->findInt64("time", &timeUs));
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatSeekPaused);
+ notify->setInt32("err", err);
+ notify->setInt64("time", timeUs);
+ notify->post();
+ break;
+
+ }
+
+ case 'see1':
+ {
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
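The seek path above is now split into two phases: 'see0' runs once the PAUSE completes and posts kWhatSeekPaused to the client, which is then expected to call continueSeekAfterPause() to trigger 'see1' and the actual PLAY with the new range. The client-side changes are not part of this excerpt, so the sketch below is a hypothetical, self-contained model of that hand-off; MockHandler, onHandlerNotify, and the integer message ids are illustrative stand-ins.

#include <cstdint>
#include <cstdio>

// Stand-ins for the message ids; the real code uses FourCC constants.
enum { kWhatSeekPaused = 1, kWhatSeekDone = 2 };

struct MockHandler {
    void continueSeekAfterPause(int64_t timeUs) {
        // In MyHandler this posts 'see1', which issues PLAY with the new Range.
        std::printf("resuming seek at %lld us\n", (long long)timeUs);
    }
};

static void onHandlerNotify(MockHandler *handler, int what, int64_t timeUs) {
    switch (what) {
        case kWhatSeekPaused:
            // Phase 1 ('see0'): the PAUSE at the seek point has completed.
            // The client flushes its decoders, then finishes the seek.
            handler->continueSeekAfterPause(timeUs);
            break;
        case kWhatSeekDone:
            // Phase 2 ('see1' onward): PLAY with the new Range has completed.
            break;
        default:
            break;
    }
}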
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
index c1e663c..d736501 100644
--- a/media/libstagefright/tests/Utils_test.cpp
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -109,21 +109,21 @@
TEST_F(UtilsTest, TestDebug) {
#define LVL(x) (ADebug::Level)(x)
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "", LVL(5)), LVL(5));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", " \t \n ", LVL(2)), LVL(2));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "", LVL(5)), LVL(5));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", " \t \n ", LVL(2)), LVL(2));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3:*deo", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString(
"video", "\t\n 3 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo,2:vid*", LVL(5)), LVL(2));
- ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3:*deo,2:vid*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString(
"avideo", "\t\n 3 \t\n:\t\n avideo \t\n,\t\n 2 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString(
"audio.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(2));
- ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString(
"video.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
- ASSERT_EQ(ADebug::GetDebugLevelFromString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetLevelFromSettingsString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));
#undef LVL
}
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0880c5d..d9f1a83 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4487,9 +4487,16 @@
sp<Track> previousTrack = mPreviousTrack.promote();
sp<Track> latestTrack = mLatestActiveTrack.promote();
- if (previousTrack != 0 && latestTrack != 0 &&
- (previousTrack->sessionId() != latestTrack->sessionId())) {
- mFlushPending = true;
+ if (previousTrack != 0 && latestTrack != 0) {
+ if (mType == DIRECT) {
+ if (previousTrack.get() != latestTrack.get()) {
+ mFlushPending = true;
+ }
+ } else /* mType == OFFLOAD */ {
+ if (previousTrack->sessionId() != latestTrack->sessionId()) {
+ mFlushPending = true;
+ }
+ }
}
PlaybackThread::onAddNewTrack_l();
}
@@ -4582,12 +4589,8 @@
if (track != previousTrack.get()) {
// Flush any data still being written from last track
mBytesRemaining = 0;
- // flush data already sent if changing audio session as audio
- // comes from a different source. Also invalidate previous track to force a
- // seek when resuming.
- if (previousTrack->sessionId() != track->sessionId()) {
- previousTrack->invalidate();
- }
+ // Invalidate previous track to force a seek when resuming.
+ previousTrack->invalidate();
}
}
mPreviousTrack = track;
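The new branching above distinguishes direct outputs, which flush whenever the active track object changes, from offload outputs, which still flush only on an audio session change. A minimal standalone model of that decision, with simplified stand-ins for the AudioFlinger types:

// Simplified stand-ins for the AudioFlinger thread and track types.
enum ThreadType { DIRECT, OFFLOAD };

struct TrackModel {
    int sessionId;
};

static bool needsFlushOnNewTrack(
        ThreadType type, const TrackModel *prev, const TrackModel *latest) {
    if (prev == nullptr || latest == nullptr) {
        return false;
    }
    if (type == DIRECT) {
        // Direct outputs flush whenever the active track object changes.
        return prev != latest;
    }
    // Offload outputs flush only when the audio session changes.
    return prev->sessionId != latest->sessionId;
}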
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
index 46b2725..a523656 100755
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.mk
@@ -26,13 +26,8 @@
LOCAL_SHARED_LIBRARIES := \
libaudiopolicyengineconfigurable \
libparameter \
- libicuuc \
- liblog \
-
-LOCAL_STATIC_LIBRARIES := \
libxmlserializer \
- libpfw_utility \
- libxml2 \
+ liblog \
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := libpolicy-subsystem
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 3b83f63..c717a56 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -719,6 +719,38 @@
return res;
}
+status_t CameraDeviceClient::tearDown(int streamId) {
+ ATRACE_CALL();
+ ALOGV("%s", __FUNCTION__);
+
+ status_t res = OK;
+ if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ // Guard against trying to tear down streams that haven't been created
+ ssize_t index = NAME_NOT_FOUND;
+ for (size_t i = 0; i < mStreamMap.size(); ++i) {
+ if (streamId == mStreamMap.valueAt(i)) {
+ index = i;
+ break;
+ }
+ }
+
+ if (index == NAME_NOT_FOUND) {
+ ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream "
+ "created yet", __FUNCTION__, mCameraId, streamId);
+ return BAD_VALUE;
+ }
+
+ // Also returns BAD_VALUE if stream ID was not valid or if the stream is in
+ // use
+ res = mDevice->tearDown(streamId);
+
+ return res;
+}
+
+
status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) {
String8 result;
result.appendFormat("CameraDeviceClient[%d] (%p) dump:\n",
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 0f485ca..1f8b39d 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -111,6 +111,9 @@
// Prepare stream by preallocating its buffers
virtual status_t prepare(int streamId);
+ // Tear down stream resources by freeing its unused buffers
+ virtual status_t tearDown(int streamId);
+
/**
* Interface used by CameraService
*/
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 06177e3..cd25949 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -289,6 +289,11 @@
virtual status_t prepare(int streamId) = 0;
/**
+ * Free stream resources by dumping its unused gralloc buffers.
+ */
+ virtual status_t tearDown(int streamId) = 0;
+
+ /**
* Get the HAL device version.
*/
virtual uint32_t getDeviceVersion() = 0;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index dfe5565..c9c990c 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -626,6 +626,12 @@
return NO_INIT;
}
+status_t Camera2Device::tearDown(int streamId) {
+ ATRACE_CALL();
+ ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
+ return NO_INIT;
+}
+
uint32_t Camera2Device::getDeviceVersion() {
ATRACE_CALL();
return mDeviceVersion;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index c9f3a2c..34c1ded 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -85,8 +85,9 @@
buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
// Flush implemented as just a wait
virtual status_t flush(int64_t *lastFrameNumber = NULL);
- // Prepare is a no-op
+ // Prepare and tearDown are no-ops
virtual status_t prepare(int streamId);
+ virtual status_t tearDown(int streamId);
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9e73b5c..3afbd89 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1384,6 +1384,37 @@
return mPreparerThread->prepare(stream);
}
+status_t Camera3Device::tearDown(int streamId) {
+ ATRACE_CALL();
+ ALOGV("%s: Camera %d: Tearing down stream %d", __FUNCTION__, mId, streamId);
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ // Teardown can only be accomplished on devices that don't require register_stream_buffers,
+ // since we cannot call register_stream_buffers except right after configure_streams.
+ if (mHal3Device->common.version < CAMERA_DEVICE_API_VERSION_3_2) {
+ ALOGE("%s: Unable to tear down streams on device HAL v%x",
+ __FUNCTION__, mHal3Device->common.version);
+ return NO_INIT;
+ }
+
+ sp<Camera3StreamInterface> stream;
+ ssize_t outputStreamIdx = mOutputStreams.indexOfKey(streamId);
+ if (outputStreamIdx == NAME_NOT_FOUND) {
+ CLOGE("Stream %d does not exist", streamId);
+ return BAD_VALUE;
+ }
+
+ stream = mOutputStreams.editValueAt(outputStreamIdx);
+
+ if (stream->hasOutstandingBuffers() || mRequestThread->isStreamPending(stream)) {
+ CLOGE("Stream %d is a target of an in-progress request", streamId);
+ return BAD_VALUE;
+ }
+
+ return stream->tearDown();
+}
+
uint32_t Camera3Device::getDeviceVersion() {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 31b6132..140da98 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -141,6 +141,8 @@
virtual status_t prepare(int streamId);
+ virtual status_t tearDown(int streamId);
+
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4c40bb6..2527fd6 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -364,6 +364,61 @@
return res;
}
+status_t Camera3Stream::tearDown() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mLock);
+
+ status_t res = OK;
+
+ // This function should be only called when the stream is configured.
+ if (mState != STATE_CONFIGURED) {
+ ALOGE("%s: Stream %d: Can't tear down stream if stream is not in "
+ "CONFIGURED state %d", __FUNCTION__, mId, mState);
+ return INVALID_OPERATION;
+ }
+
+ // If any buffers have been handed to the HAL, the stream cannot be torn down.
+ if (getHandoutOutputBufferCountLocked() > 0) {
+ ALOGE("%s: Stream %d: Can't tear down a stream that has outstanding buffers",
+ __FUNCTION__, mId);
+ return INVALID_OPERATION;
+ }
+
+ // Free buffers by disconnecting and then reconnecting to the buffer queue
+ // Only unused buffers will be dropped immediately; buffers that have been
+ // filled and are waiting to be acquired by the consumer, as well as buffers
+ // that are currently acquired, will be freed once the consumer releases them.
+
+ res = disconnectLocked();
+ if (res != OK) {
+ if (res == -ENOTCONN) {
+ // queue has been disconnected, nothing left to do, so exit with success
+ return OK;
+ }
+ ALOGE("%s: Stream %d: Unable to disconnect to tear down buffers: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
+ mState = STATE_IN_CONFIG;
+
+ res = configureQueueLocked();
+ if (res != OK) {
+ ALOGE("%s: Unable to configure stream %d queue: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ mState = STATE_ERROR;
+ return res;
+ }
+
+ // Reset prepared state, since we've reconnected to the queue and can prepare again.
+ mPrepared = false;
+ mStreamUnpreparable = false;
+
+ mState = STATE_CONFIGURED;
+
+ return OK;
+}
+
status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
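tearDown() above keeps the stream in CONFIGURED while passing through disconnect, queue reconfiguration, and a reset of the prepared flags. The standalone model below mirrors the ordering of those checks and transitions; the enum values and helper names are simplified stand-ins, not the Camera3Stream members.

// Simplified stand-ins for the Camera3Stream state and helpers.
enum StreamState { STATE_CONFIGURED, STATE_IN_CONFIG, STATE_ERROR };

struct StreamModel {
    StreamState state = STATE_CONFIGURED;
    int handedOutBuffers = 0;   // buffers currently owned by the HAL
    bool prepared = true;
    bool unpreparable = true;

    bool disconnectQueue() { return true; }   // stand-in for disconnectLocked()
    bool reconfigureQueue() { return true; }  // stand-in for configureQueueLocked()

    // Mirrors the ordering of checks and transitions in tearDown().
    bool tearDown() {
        if (state != STATE_CONFIGURED) {
            return false;                    // INVALID_OPERATION in the real code
        }
        if (handedOutBuffers > 0) {
            return false;                    // the HAL still owns buffers
        }
        if (!disconnectQueue()) {
            return false;
        }
        state = STATE_IN_CONFIG;
        if (!reconfigureQueue()) {
            state = STATE_ERROR;
            return false;
        }
        prepared = false;                    // buffers were freed; prepare() is allowed again
        unpreparable = false;
        state = STATE_CONFIGURED;
        return true;
    }
};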
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 0543c66..bab2177 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -247,6 +247,20 @@
status_t cancelPrepare();
/**
+ * Tear down memory for this stream. This frees all unused gralloc buffers
+ * allocated for this stream, but leaves it ready for operation afterward.
+ *
+ * May only be called in the CONFIGURED state, and keeps the stream in
+ * the CONFIGURED state.
+ *
+ * Returns:
+ * OK if teardown succeeded.
+ * INVALID_OPERATION if not in the CONFIGURED state
+ * NO_INIT in case of a serious error from the HAL device
+ */
+ status_t tearDown();
+
+ /**
* Fill in the camera3_stream_buffer with the next valid buffer for this
* stream, to hand over to the HAL.
*
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6c87a45..c086eaf 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -152,6 +152,20 @@
virtual status_t cancelPrepare() = 0;
/**
+ * Tear down memory for this stream. This frees all unused gralloc buffers
+ * allocated for this stream, but leaves it ready for operation afterward.
+ *
+ * May only be called in the CONFIGURED state, and keeps the stream in
+ * the CONFIGURED state.
+ *
+ * Returns:
+ * OK if teardown succeeded.
+ * INVALID_OPERATION if not in the CONFIGURED state
+ * NO_INIT in case of a serious error from the HAL device
+ */
+ virtual status_t tearDown() = 0;
+
+ /**
* Fill in the camera3_stream_buffer with the next valid buffer for this
* stream, to hand over to the HAL.
*
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index e2b6695..61147ff 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -204,6 +204,17 @@
}
}
+void ResourceManagerService::getClientForResource_l(
+ int callingPid, const MediaResource *res, Vector<sp<IResourceManagerClient>> *clients) {
+ if (res == NULL) {
+ return;
+ }
+ sp<IResourceManagerClient> client;
+ if (getLowestPriorityBiggestClient_l(callingPid, res->mType, &client)) {
+ clients->push_back(client);
+ }
+}
+
bool ResourceManagerService::reclaimResource(
int callingPid, const Vector<MediaResource> &resources) {
String8 log = String8::format("reclaimResource(callingPid %d, resources %s)",
@@ -213,54 +224,61 @@
Vector<sp<IResourceManagerClient>> clients;
{
Mutex::Autolock lock(mLock);
- // first pass to handle secure/non-secure codec conflict
+ const MediaResource *secureCodec = NULL;
+ const MediaResource *nonSecureCodec = NULL;
+ const MediaResource *graphicMemory = NULL;
for (size_t i = 0; i < resources.size(); ++i) {
String8 type = resources[i].mType;
- if (type == kResourceSecureCodec) {
- if (!mSupportsMultipleSecureCodecs) {
- if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
- return false;
- }
- }
- if (!mSupportsSecureWithNonSecureCodec) {
- if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) {
- return false;
- }
- }
+ if (resources[i].mType == kResourceSecureCodec) {
+ secureCodec = &resources[i];
} else if (type == kResourceNonSecureCodec) {
- if (!mSupportsSecureWithNonSecureCodec) {
- if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
- return false;
- }
+ nonSecureCodec = &resources[i];
+ } else if (type == kResourceGraphicMemory) {
+ graphicMemory = &resources[i];
+ }
+ }
+
+ // first pass to handle secure/non-secure codec conflict
+ if (secureCodec != NULL) {
+ if (!mSupportsMultipleSecureCodecs) {
+ if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ return false;
+ }
+ }
+ if (!mSupportsSecureWithNonSecureCodec) {
+ if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) {
+ return false;
+ }
+ }
+ }
+ if (nonSecureCodec != NULL) {
+ if (!mSupportsSecureWithNonSecureCodec) {
+ if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ return false;
}
}
}
if (clients.size() == 0) {
// if no secure/non-secure codec conflict, run second pass to handle other resources.
- for (size_t i = 0; i < resources.size(); ++i) {
- String8 type = resources[i].mType;
- if (type == kResourceGraphicMemory) {
- sp<IResourceManagerClient> client;
- if (!getLowestPriorityBiggestClient_l(callingPid, type, &client)) {
- return false;
- }
- clients.push_back(client);
- }
- }
+ getClientForResource_l(callingPid, graphicMemory, &clients);
}
if (clients.size() == 0) {
// if we are here, run the third pass to free one codec with the same type.
- for (size_t i = 0; i < resources.size(); ++i) {
- String8 type = resources[i].mType;
- if (type == kResourceSecureCodec || type == kResourceNonSecureCodec) {
- sp<IResourceManagerClient> client;
- if (!getLowestPriorityBiggestClient_l(callingPid, type, &client)) {
- return false;
- }
- clients.push_back(client);
- }
+ getClientForResource_l(callingPid, secureCodec, &clients);
+ getClientForResource_l(callingPid, nonSecureCodec, &clients);
+ }
+
+ if (clients.size() == 0) {
+ // if we are here, run the fourth pass to free one codec of the other type.
+ if (secureCodec != NULL) {
+ MediaResource temp(String8(kResourceNonSecureCodec), 1);
+ getClientForResource_l(callingPid, &temp, &clients);
+ }
+ if (nonSecureCodec != NULL) {
+ MediaResource temp(String8(kResourceSecureCodec), 1);
+ getClientForResource_l(callingPid, &temp, &clients);
}
}
}
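The restructured reclaimResource() above classifies the requested resources once and then runs up to four passes, stopping at the first pass that yields any clients: policy-driven secure/non-secure conflicts, the biggest graphic-memory user, one codec of the requested type, and finally (new in this change) one codec of the other type. The sketch below only lists the order in which those passes would be attempted for a hypothetical request; in the real service each later pass runs only if the earlier ones found no clients, and all names here are illustrative.

#include <string>
#include <vector>

struct ReclaimPolicy {
    bool supportsMultipleSecureCodecs;
    bool supportsSecureWithNonSecureCodec;
};

// Lists the reclaim targets in the order the passes would try them for a
// request containing a secure and/or non-secure codec (graphic memory and
// the early exit between passes are omitted for brevity).
static std::vector<std::string> reclaimOrder(
        bool wantsSecure, bool wantsNonSecure, const ReclaimPolicy &policy) {
    std::vector<std::string> order;
    // Pass 1: conflicts implied by the policy flags.
    if (wantsSecure && !policy.supportsMultipleSecureCodecs) {
        order.push_back("all secure codecs");
    }
    if (wantsSecure && !policy.supportsSecureWithNonSecureCodec) {
        order.push_back("all non-secure codecs");
    }
    if (wantsNonSecure && !policy.supportsSecureWithNonSecureCodec) {
        order.push_back("all secure codecs");
    }
    // Pass 2 would target the lowest-priority, biggest graphic-memory user.
    // Pass 3: one codec of the same type as the request.
    if (wantsSecure) order.push_back("one secure codec");
    if (wantsNonSecure) order.push_back("one non-secure codec");
    // Pass 4 (new in this change): one codec of the other type.
    if (wantsSecure) order.push_back("one non-secure codec");
    if (wantsNonSecure) order.push_back("one secure codec");
    return order;
}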
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 0d9d878..ca218fc 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -65,6 +65,9 @@
virtual void removeResource(int64_t clientId);
+ // Tries to reclaim resources from processes with lower priority than the calling process
+ // according to the requested resources.
+ // Returns true if any resource has been reclaimed, otherwise returns false.
virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources);
protected:
@@ -95,6 +98,11 @@
bool isCallingPriorityHigher_l(int callingPid, int pid);
+ // A helper function that calls getLowestPriorityBiggestClient_l and adds the resulting
+ // client to the given Vector.
+ void getClientForResource_l(
+ int callingPid, const MediaResource *res, Vector<sp<IResourceManagerClient>> *clients);
+
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;
sp<ServiceLog> mServiceLog;
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index 3d53f1f..8ae6a55 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -79,6 +79,10 @@
static const int kTestPid1 = 30;
static const int kTestPid2 = 20;
+static const int kLowPriorityPid = 40;
+static const int kMidPriorityPid = 25;
+static const int kHighPriorityPid = 10;
+
class ResourceManagerServiceTest : public ::testing::Test {
public:
ResourceManagerServiceTest()
@@ -227,15 +231,12 @@
String8 type = String8(kResourceSecureCodec);
String8 unknowType = String8("unknowType");
Vector<sp<IResourceManagerClient> > clients;
- int lowPriorityPid = 100;
- EXPECT_FALSE(mService->getAllClients_l(lowPriorityPid, type, &clients));
- int midPriorityPid = 25;
+ EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, &clients));
// some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
// will fail.
- EXPECT_FALSE(mService->getAllClients_l(midPriorityPid, type, &clients));
- int highPriorityPid = 10;
- EXPECT_TRUE(mService->getAllClients_l(highPriorityPid, unknowType, &clients));
- EXPECT_TRUE(mService->getAllClients_l(highPriorityPid, type, &clients));
+ EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, &clients));
+ EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, unknowType, &clients));
+ EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
EXPECT_EQ(2u, clients.size());
EXPECT_EQ(mTestClient3, clients[0]);
@@ -254,19 +255,19 @@
mService->mSupportsSecureWithNonSecureCodec = true;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
- EXPECT_FALSE(mService->reclaimResource(25, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
+ EXPECT_FALSE(mService->reclaimResource(kMidPriorityPid, resources));
// reclaim all secure codecs
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(true, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
// call again should reclaim one largest graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, true, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
// ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
@@ -276,15 +277,15 @@
mService->mSupportsSecureWithNonSecureCodec = false;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
- EXPECT_FALSE(mService->reclaimResource(25, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
+ EXPECT_FALSE(mService->reclaimResource(kMidPriorityPid, resources));
// reclaim all secure and non-secure codecs
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(true, true, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(true /* c1 */, true /* c2 */, true /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
@@ -295,23 +296,23 @@
mService->mSupportsSecureWithNonSecureCodec = false;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
- EXPECT_FALSE(mService->reclaimResource(25, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
+ EXPECT_FALSE(mService->reclaimResource(kMidPriorityPid, resources));
// reclaim all non-secure codecs
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, true, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
// call again should reclaim one largest graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(true, false, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
// call again should reclaim another largest graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
// ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -321,22 +322,22 @@
mService->mSupportsSecureWithNonSecureCodec = true;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
- EXPECT_TRUE(mService->reclaimResource(10, resources));
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// one largest graphic memory from lowest process got reclaimed
- verifyClients(true, false, false);
+ verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
// call again should reclaim another graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, true, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
// call again should reclaim another graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
// ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
@@ -348,19 +349,17 @@
Vector<MediaResource> resources;
resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
- EXPECT_TRUE(mService->reclaimResource(10, resources));
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// secure codec from lowest process got reclaimed
- verifyClients(true, false, false);
+ verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
// call again should reclaim another secure codec from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
- // nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
-
- // clean up client 2 which still has non secure codec left
- mService->removeResource((int64_t) mTestClient2.get());
+ // no more secure codecs; a non-secure codec will be reclaimed.
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
}
}
@@ -375,19 +374,19 @@
mService->mSupportsSecureWithNonSecureCodec = false;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
- EXPECT_FALSE(mService->reclaimResource(25, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
+ EXPECT_FALSE(mService->reclaimResource(kMidPriorityPid, resources));
// reclaim all secure codecs
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(true, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(true /* c1 */, false /* c2 */, true /* c3 */);
// call again should reclaim one graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, true, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
@@ -397,22 +396,22 @@
mService->mSupportsSecureWithNonSecureCodec = true;
// priority too low
- EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(kLowPriorityPid, resources));
- EXPECT_TRUE(mService->reclaimResource(10, resources));
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// one largest graphic memory from lowest process got reclaimed
- verifyClients(true, false, false);
+ verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
// call again should reclaim another graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, true, false);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
// call again should reclaim another graphic memory from lowest process
- EXPECT_TRUE(mService->reclaimResource(10, resources));
- verifyClients(false, false, true);
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(false /* c1 */, false /* c2 */, true /* c3 */);
// nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ EXPECT_FALSE(mService->reclaimResource(kHighPriorityPid, resources));
}
// ### secure codec can coexist with non-secure codec ###
@@ -423,15 +422,15 @@
Vector<MediaResource> resources;
resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
- EXPECT_TRUE(mService->reclaimResource(10, resources));
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// one non secure codec from lowest process got reclaimed
- verifyClients(false, true, false);
+ verifyClients(false /* c1 */, true /* c2 */, false /* c3 */);
- // nothing left
- EXPECT_FALSE(mService->reclaimResource(10, resources));
+ // no more non-secure codecs; a secure codec from the lowest priority process will be reclaimed
+ EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
+ verifyClients(true /* c1 */, false /* c2 */, false /* c3 */);
- // clean up client 1 and 3 which still have secure codec left
- mService->removeResource((int64_t) mTestClient1.get());
+ // clean up client 3, which still has a secure codec left
mService->removeResource((int64_t) mTestClient3.get());
}
}
@@ -439,12 +438,12 @@
void testGetLowestPriorityBiggestClient() {
String8 type = String8(kResourceGraphicMemory);
sp<IResourceManagerClient> client;
- EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(10, type, &client));
+ EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
addResource();
- EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(100, type, &client));
- EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(10, type, &client));
+ EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, &client));
+ EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
// kTestPid1 is the lowest priority process with kResourceGraphicMemory.
// mTestClient1 has the largest kResourceGraphicMemory within kTestPid1.