Merge "SoundTrigger: get service by value." into lmp-dev am: d9288b85b3 am: 1382a3a869 am: e18f3154be am: 867bfdd16f am: 38071a6902 am: c4bcf57dc2 am: ff43633d16 am: e0de63ea41 am: 7ce2ff62f7
am: d88949cc01
Change-Id: Ia3d37f1a381e0191e4430611269a13eeceb0f709
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index fb43708..0d689a6 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -37,7 +37,7 @@
mMetadata.clear();
mSurfaceList.clear();
- status_t err;
+ status_t err = OK;
if ((err = mMetadata.readFromParcel(parcel)) != OK) {
ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__);
@@ -65,19 +65,16 @@
}
// Surface.writeToParcel
- const char16_t* name = parcel->readString16Inplace(&len);
- ALOGV("%s: Read surface name = %s", __FUNCTION__,
- name != NULL ? String8(name).string() : "<null>");
- sp<IBinder> binder(parcel->readStrongBinder());
- ALOGV("%s: Read surface binder = %p",
- __FUNCTION__, binder.get());
+ view::Surface surfaceShim;
+ if ((err = surfaceShim.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read output target Surface %d from parcel: %s (%d)",
+ __FUNCTION__, i, strerror(-err), err);
+ return err;
+ }
sp<Surface> surface;
-
- if (binder != NULL) {
- sp<IGraphicBufferProducer> gbp =
- interface_cast<IGraphicBufferProducer>(binder);
- surface = new Surface(gbp);
+ if (surfaceShim.graphicBufferProducer != NULL) {
+ surface = new Surface(surfaceShim.graphicBufferProducer);
}
mSurfaceList.push_back(surface);
@@ -99,7 +96,7 @@
return BAD_VALUE;
}
- status_t err;
+ status_t err = OK;
if ((err = mMetadata.writeToParcel(parcel)) != OK) {
return err;
@@ -111,20 +108,18 @@
parcel->writeInt32(size);
for (int32_t i = 0; i < size; ++i) {
- sp<Surface> surface = mSurfaceList[i];
-
- sp<IBinder> binder;
- if (surface != 0) {
- binder = IInterface::asBinder(surface->getIGraphicBufferProducer());
- }
-
// not sure if readParcelableArray does this, hard to tell from source
parcel->writeString16(String16("android.view.Surface"));
// Surface.writeToParcel
- parcel->writeString16(String16("unknown_name"));
- // Surface.nativeWriteToParcel
- parcel->writeStrongBinder(binder);
+ view::Surface surfaceShim;
+ surfaceShim.name = String16("unknown_name");
+ surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
+ if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
+ __FUNCTION__, i, strerror(-err), err);
+ return err;
+ }
}
parcel->writeInt32(mIsReprocess ? 1 : 0);
diff --git a/include/media/ToneGenerator.h b/include/media/ToneGenerator.h
index 75515ac..a419e17 100644
--- a/include/media/ToneGenerator.h
+++ b/include/media/ToneGenerator.h
@@ -313,7 +313,7 @@
short mA1_Q14; // Q14 coefficient
// delay line of full amplitude generator
- short mS1, mS2; // delay line S2 oldest
+ long mS1, mS2; // delay line S2 oldest
short mS2_0; // saved value for reinitialisation
short mAmplitude_Q15; // Q15 amplitude
};
diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h
index abfe068..a61ddaa 100644
--- a/include/media/stagefright/MediaBuffer.h
+++ b/include/media/stagefright/MediaBuffer.h
@@ -68,11 +68,16 @@
mMemory = mem;
}
- // Decrements the reference count and returns the buffer to its
- // associated MediaBufferGroup if the reference count drops to 0.
+ // If MediaBufferGroup is set, decrement the local reference count;
+ // if the local reference count drops to 0, return the buffer to the
+ // associated MediaBufferGroup.
+ //
+ // If no MediaBufferGroup is set, the local reference count must be zero
+ // when called, whereupon the MediaBuffer is deleted.
virtual void release();
- // Increments the reference count.
+ // Increments the local reference count.
+ // Use only when MediaBufferGroup is set.
virtual void add_ref();
void *data() const;
@@ -97,7 +102,28 @@
// MetaData.
MediaBuffer *clone();
- int refcount() const;
+ // sum of localRefcount() and remoteRefcount()
+ int refcount() const {
+ return localRefcount() + remoteRefcount();
+ }
+
+ int localRefcount() const {
+ return mRefCount;
+ }
+
+ int remoteRefcount() const {
+ if (mMemory.get() == nullptr || mMemory->pointer() == nullptr) return 0;
+ int32_t remoteRefcount =
+ reinterpret_cast<SharedControl *>(mMemory->pointer())->getRemoteRefcount();
+ // Sanity check so that remoteRefcount() is non-negative.
+ return remoteRefcount >= 0 ? remoteRefcount : 0; // do not allow corrupted data.
+ }
+
+ // returns old value
+ int addRemoteRefcount(int32_t value) {
+ if (mMemory.get() == nullptr || mMemory->pointer() == nullptr) return 0;
+ return reinterpret_cast<SharedControl *>(mMemory->pointer())->addRemoteRefcount(value);
+ }
bool isDeadObject() const {
return isDeadObject(mMemory);
@@ -117,25 +143,6 @@
}
protected:
- // MediaBuffer remote releases are handled through a
- // pending release count variable stored in a SharedControl block
- // at the start of the IMemory.
-
- // Returns old value of pending release count.
- inline int32_t addPendingRelease(int32_t value) {
- return getSharedControl()->addPendingRelease(value);
- }
-
- // Issues all pending releases (works in parallel).
- // Assumes there is a MediaBufferObserver.
- inline void resolvePendingRelease() {
- if (mMemory.get() == nullptr) return;
- while (addPendingRelease(-1) > 0) {
- release();
- }
- addPendingRelease(1);
- }
-
// true if MediaBuffer is observed (part of a MediaBufferGroup).
inline bool isObserved() const {
return mObserver != nullptr;
@@ -181,18 +188,18 @@
};
// returns old value
- inline int32_t addPendingRelease(int32_t value) {
+ inline int32_t addRemoteRefcount(int32_t value) {
return std::atomic_fetch_add_explicit(
- &mPendingRelease, (int_least32_t)value, std::memory_order_seq_cst);
+ &mRemoteRefcount, (int_least32_t)value, std::memory_order_seq_cst);
}
- inline int32_t getPendingRelease() const {
- return std::atomic_load_explicit(&mPendingRelease, std::memory_order_seq_cst);
+ inline int32_t getRemoteRefcount() const {
+ return std::atomic_load_explicit(&mRemoteRefcount, std::memory_order_seq_cst);
}
- inline void setPendingRelease(int32_t value) {
+ inline void setRemoteRefcount(int32_t value) {
std::atomic_store_explicit(
- &mPendingRelease, (int_least32_t)value, std::memory_order_seq_cst);
+ &mRemoteRefcount, (int_least32_t)value, std::memory_order_seq_cst);
}
inline bool isDeadObject() const {
@@ -209,13 +216,13 @@
std::atomic_store_explicit(
&mFlags, (int_least32_t)0, std::memory_order_seq_cst);
std::atomic_store_explicit(
- &mPendingRelease, (int_least32_t)0, std::memory_order_seq_cst);
+ &mRemoteRefcount, (int_least32_t)0, std::memory_order_seq_cst);
}
private:
// Caution: atomic_int_fast32_t is 64 bits on LP64.
std::atomic_int_least32_t mFlags;
- std::atomic_int_least32_t mPendingRelease;
+ std::atomic_int_least32_t mRemoteRefcount;
int32_t unused[6]; // additional buffer space
};
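As an aside, the local/remote refcount split above boils down to a small pattern that can be shown with standard C++ atomics alone. The sketch below uses a hypothetical SharedCounter in place of the SharedControl block stored at the start of the IMemory; it illustrates the counting scheme, not the framework code itself.

#include <atomic>
#include <cassert>
#include <cstdint>

// Stand-in for the SharedControl block at the start of the shared memory:
// a seq_cst atomic counter of remote references.
struct SharedCounter {
    std::atomic_int_least32_t remote{0};

    // Returns the old value, mirroring addRemoteRefcount() above.
    int32_t add(int32_t value) {
        return std::atomic_fetch_add_explicit(
                &remote, (int_least32_t)value, std::memory_order_seq_cst);
    }
    int32_t get() const {
        return std::atomic_load_explicit(&remote, std::memory_order_seq_cst);
    }
};

int main() {
    SharedCounter control;
    int localRefcount = 0;

    localRefcount++;          // local add_ref()
    control.add(1);           // buffer handed to a remote process
    localRefcount--;          // local release()

    // Total interest in the buffer = local + remote, as in refcount().
    assert(localRefcount + control.get() == 1);

    control.add(-1);          // remote wrapper destroyed
    assert(localRefcount + control.get() == 0);
    return 0;
}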
diff --git a/include/media/stagefright/MediaBufferGroup.h b/include/media/stagefright/MediaBufferGroup.h
index dfa31b2..3051406 100644
--- a/include/media/stagefright/MediaBufferGroup.h
+++ b/include/media/stagefright/MediaBufferGroup.h
@@ -53,10 +53,7 @@
size_t buffers() const { return mBuffers.size(); }
- // freeBuffers is the number of free buffers allowed to remain.
- void gc(size_t freeBuffers = 0);
-
-protected:
+ // If buffer is nullptr, have acquire_buffer() check for remote release.
virtual void signalBufferReturned(MediaBuffer *buffer);
private:
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index dd94ccf..5289c5f 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -58,9 +58,9 @@
protected:
virtual ~RemoteMediaBufferWrapper() {
- // Indicate to MediaBufferGroup to release.
- int32_t old = addPendingRelease(1);
- ALOGV("RemoteMediaBufferWrapper: releasing %p, old %d", this, old);
+ // Release our interest in the MediaBuffer's shared memory.
+ int32_t old = addRemoteRefcount(-1);
+ ALOGV("RemoteMediaBufferWrapper: releasing %p, refcount %d", this, old - 1);
mMemory.clear(); // don't set the dead object flag.
}
};
@@ -296,8 +296,8 @@
case STOP: {
ALOGV("stop");
CHECK_INTERFACE(IMediaSource, data, reply);
+ mGroup->signalBufferReturned(nullptr);
status_t status = stop();
- mGroup->gc();
mIndexCache.reset();
mBuffersSinceStop = 0;
return status;
@@ -305,6 +305,7 @@
case PAUSE: {
ALOGV("pause");
CHECK_INTERFACE(IMediaSource, data, reply);
+ mGroup->signalBufferReturned(nullptr);
return pause();
}
case GETFORMAT: {
@@ -336,7 +337,7 @@
&& len == sizeof(opts)
&& data.read((void *)&opts, len) == NO_ERROR;
- mGroup->gc(kBinderMediaBuffers /* freeBuffers */);
+ mGroup->signalBufferReturned(nullptr);
mIndexCache.gc();
size_t inlineTransferSize = 0;
status_t ret = NO_ERROR;
@@ -411,10 +412,11 @@
reply->writeInt32(offset);
reply->writeInt32(length);
buf->meta_data()->writeToParcel(*reply);
- if (transferBuf != buf) {
- buf->release();
- } else if (!supportNonblockingRead()) {
- maxNumBuffers = 0; // stop readMultiple with one shared buffer.
+ if (transferBuf == buf) {
+ buf->addRemoteRefcount(1);
+ if (!supportNonblockingRead()) {
+ maxNumBuffers = 0; // stop readMultiple with one shared buffer.
+ }
}
} else {
ALOGV_IF(buf->mMemory != nullptr,
@@ -423,12 +425,12 @@
reply->writeInt32(INLINE_BUFFER);
reply->writeByteArray(length, (uint8_t*)buf->data() + offset);
buf->meta_data()->writeToParcel(*reply);
- buf->release();
inlineTransferSize += length;
if (inlineTransferSize > kInlineMaxTransfer) {
maxNumBuffers = 0; // stop readMultiple if inline transfer is too large.
}
}
+ buf->release();
}
reply->writeInt32(NULL_BUFFER); // Indicate no more MediaBuffers.
reply->writeInt32(ret);
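The ordering in the READMULTIPLE path matters: the remote count is bumped before the reply goes out, and the local reference is then dropped unconditionally, so the shared buffer is never considered free while the transaction is in flight. A minimal, self-contained sketch of that handoff (illustrative globals, not the binder code):

#include <atomic>
#include <cassert>

// Hypothetical stand-in for the shared-memory remote refcount used by
// RemoteMediaBufferWrapper and the READMULTIPLE handler above.
static std::atomic<int> gRemoteRefs{0};
static int gLocalRefs = 0;

static void sendSharedBuffer() {
    // Sender: take a remote reference *before* releasing the local one.
    gRemoteRefs.fetch_add(1);   // buf->addRemoteRefcount(1)
    gLocalRefs--;               // buf->release()
}

static void destroyRemoteWrapper() {
    // Receiver: ~RemoteMediaBufferWrapper() drops its interest.
    gRemoteRefs.fetch_add(-1);  // addRemoteRefcount(-1)
}

int main() {
    gLocalRefs = 1;             // buffer acquired for the read
    sendSharedBuffer();
    assert(gLocalRefs == 0 && gRemoteRefs.load() == 1);  // still in use remotely
    destroyRemoteWrapper();
    assert(gRemoteRefs.load() == 0);                     // now reclaimable
    return 0;
}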
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index 2f53637..9a087ff 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -1612,8 +1612,8 @@
lS1 = (long)0;
lS2 = (long)mS2_0;
} else {
- lS1 = (long)mS1;
- lS2 = (long)mS2;
+ lS1 = mS1;
+ lS2 = mS2;
}
lA1 = (long)mA1_Q14;
lAmplitude = (long)mAmplitude_Q15;
@@ -1649,8 +1649,8 @@
}
// save status
- mS1 = (short)lS1;
- mS2 = (short)lS2;
+ mS1 = lS1;
+ mS2 = lS2;
}
} // end namespace android
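Widening mS1/mS2 from short to long matters because the delay line of the second-order recursion can hold values that exceed 16 bits. The sketch below is a minimal fixed-point oscillator in that style (Q14 coefficient, wide state); the coefficient and seed are assumed values for illustration, not the exact ToneGenerator math.

#include <cstdio>

// Minimal second-order recursive sine generator with a Q14 coefficient.
// Keeping the delay line (s1, s2) in a wide type avoids truncating state
// between blocks, which is the point of the short -> long change above.
int main() {
    const long a1_q14 = 32138;     // ~2*cos(w) in Q14 for a low frequency (assumed value)
    long s1 = 0, s2 = 8000;        // delay line, s2 oldest (assumed seed)

    for (int n = 0; n < 8; ++n) {
        long s0 = ((a1_q14 * s1) >> 14) - s2;  // recursion: s0 = a1*s1 - s2
        s2 = s1;
        s1 = s0;
        printf("sample[%d] = %ld\n", n, s0);
    }
    return 0;
}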
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index b47a4f1..b742762 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -141,6 +141,17 @@
mAudioSink->flush();
mAudioSink->close();
}
+
+ // Try to avoid a race condition in case the audio callback is still running.
+ Mutex::Autolock autoLock(mLock);
+ mUseAudioCallback = false;
+ flushQueue(&mAudioQueue);
+ flushQueue(&mVideoQueue);
+ mWakeLock.clear();
+ mMediaClock.clear();
+ mVideoScheduler.clear();
+ mNotify.clear();
+ mAudioSink.clear();
}
void NuPlayer::Renderer::queueBuffer(
@@ -744,7 +755,7 @@
case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
{
ALOGV("AudioSink::CB_EVENT_STREAM_END");
- me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+ me->notifyEOSCallback();
break;
}
@@ -759,6 +770,16 @@
return 0;
}
+void NuPlayer::Renderer::notifyEOSCallback() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!mUseAudioCallback) {
+ return;
+ }
+
+ notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+}
+
size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
Mutex::Autolock autoLock(mLock);
@@ -1190,8 +1211,10 @@
msg->setWhat(kWhatPostDrainVideoQueue);
msg->post(postDelayUs);
mVideoScheduler->restart();
- ALOGI("possible video time jump of %dms or uninitialized media clock, retrying in %dms",
- (int)(delayUs / 1000), (int)(postDelayUs / 1000));
+ ALOGI("possible video time jump of %dms (%lld : %lld) or uninitialized media clock,"
+ " retrying in %dms",
+ (int)(delayUs / 1000), (long long)mediaTimeUs,
+ (long long)mAudioFirstAnchorTimeMediaUs, (int)(postDelayUs / 1000));
mDrainVideoQueuePending = true;
return;
}
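The new notifyEOSCallback() uses a common teardown guard: the callback thread re-checks mUseAudioCallback under mLock before acting, so a callback that races with shutdown becomes a no-op. A minimal sketch of that guard with standard C++ primitives (names are illustrative, not the renderer's):

#include <mutex>
#include <cstdio>

struct Renderer {
    std::mutex lock;
    bool useAudioCallback = true;

    // Called from the audio callback thread, analogous to notifyEOSCallback().
    void onStreamEnd() {
        std::lock_guard<std::mutex> guard(lock);
        if (!useAudioCallback) {
            return;                 // teardown already ran; ignore the late callback
        }
        printf("notify EOS\n");
    }

    // Called from the teardown path, analogous to the destructor change above.
    void shutdown() {
        std::lock_guard<std::mutex> guard(lock);
        useAudioCallback = false;   // late callbacks now become no-ops
    }
};

int main() {
    Renderer r;
    r.shutdown();
    r.onStreamEnd();                // prints nothing: callback arrived after shutdown
    return 0;
}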
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 004e21c..fe7f8fa 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -212,6 +212,7 @@
status_t getCurrentPositionFromAnchor(
int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+ void notifyEOSCallback();
size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 8a305de..c4e5df7 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -58,6 +58,7 @@
mDisconnectReplyID(0),
mBuffering(false),
mInPreparationPhase(true),
+ mEOSPending(false),
mSeekGeneration(0),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0) {
@@ -200,34 +201,28 @@
status_t finalResult;
if (!source->hasBufferAvailable(&finalResult)) {
if (finalResult == OK) {
- int64_t mediaDurationUs = 0;
- getDuration(&mediaDurationUs);
- sp<AnotherPacketSource> otherSource = getSource(!audio);
- status_t otherFinalResult;
- // If other source already signaled EOS, this source should also signal EOS
- if (otherSource != NULL &&
- !otherSource->hasBufferAvailable(&otherFinalResult) &&
- otherFinalResult == ERROR_END_OF_STREAM) {
- source->signalEOS(ERROR_END_OF_STREAM);
+ // If the other source has already signaled EOS, this source should also return EOS
+ if (sourceReachedEOS(!audio)) {
return ERROR_END_OF_STREAM;
}
// If this source has detected near end, give it some time to retrieve more
- // data before signaling EOS
+ // data before returning EOS
+ int64_t mediaDurationUs = 0;
+ getDuration(&mediaDurationUs);
if (source->isFinished(mediaDurationUs)) {
int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
if (eosTimeout == 0) {
setEOSTimeout(audio, ALooper::GetNowUs());
} else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
setEOSTimeout(audio, 0);
- source->signalEOS(ERROR_END_OF_STREAM);
return ERROR_END_OF_STREAM;
}
return -EWOULDBLOCK;
}
- if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+ if (!sourceNearEOS(!audio)) {
// We should not enter buffering mode
// if any of the sources already have detected EOS.
startBufferingIfNecessary();
@@ -306,6 +301,7 @@
mState = SEEKING;
mHandler->seek(seekTimeUs);
+ mEOSPending = false;
}
void NuPlayer::RTSPSource::schedulePollBuffering() {
@@ -314,10 +310,10 @@
}
void NuPlayer::RTSPSource::checkBuffering(
- bool *prepared, bool *underflow, bool *overflow, bool *startServer) {
+ bool *prepared, bool *underflow, bool *overflow, bool *startServer, bool *finished) {
size_t numTracks = mTracks.size();
- size_t preparedCount, underflowCount, overflowCount, startCount;
- preparedCount = underflowCount = overflowCount = startCount = 0;
+ size_t preparedCount, underflowCount, overflowCount, startCount, finishedCount;
+ preparedCount = underflowCount = overflowCount = startCount = finishedCount = 0;
size_t count = numTracks;
for (size_t i = 0; i < count; ++i) {
@@ -337,6 +333,7 @@
if (src->isFinished(/* duration */ 0)) {
++overflowCount;
+ ++finishedCount;
} else {
if (bufferedDurationUs < kUnderflowMarkUs) {
++underflowCount;
@@ -354,11 +351,12 @@
*underflow = (underflowCount > 0);
*overflow = (overflowCount == numTracks);
*startServer = (startCount > 0);
+ *finished = (finishedCount > 0);
}
void NuPlayer::RTSPSource::onPollBuffering() {
- bool prepared, underflow, overflow, startServer;
- checkBuffering(&prepared, &underflow, &overflow, &startServer);
+ bool prepared, underflow, overflow, startServer, finished;
+ checkBuffering(&prepared, &underflow, &overflow, &startServer, &finished);
if (prepared && mInPreparationPhase) {
mInPreparationPhase = false;
@@ -369,8 +367,11 @@
startBufferingIfNecessary();
}
- if (overflow && mHandler != NULL) {
+ if (haveSufficientDataOnAllTracks()) {
stopBufferingIfNecessary();
+ }
+
+ if (overflow && mHandler != NULL) {
mHandler->pause();
}
@@ -378,9 +379,72 @@
mHandler->resume();
}
+ if (finished && mHandler != NULL) {
+ mHandler->cancelAccessUnitTimeoutCheck();
+ }
+
schedulePollBuffering();
}
+void NuPlayer::RTSPSource::signalSourceEOS(status_t result) {
+ const bool audio = true;
+ const bool video = false;
+
+ sp<AnotherPacketSource> source = getSource(audio);
+ if (source != NULL) {
+ source->signalEOS(result);
+ }
+
+ source = getSource(video);
+ if (source != NULL) {
+ source->signalEOS(result);
+ }
+}
+
+bool NuPlayer::RTSPSource::sourceReachedEOS(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
+ status_t finalResult;
+ return (source != NULL &&
+ !source->hasBufferAvailable(&finalResult) &&
+ finalResult == ERROR_END_OF_STREAM);
+}
+
+bool NuPlayer::RTSPSource::sourceNearEOS(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
+ int64_t mediaDurationUs = 0;
+ getDuration(&mediaDurationUs);
+ return (source != NULL && source->isFinished(mediaDurationUs));
+}
+
+void NuPlayer::RTSPSource::onSignalEOS(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mSeekGeneration) {
+ return;
+ }
+
+ if (mEOSPending) {
+ signalSourceEOS(ERROR_END_OF_STREAM);
+ mEOSPending = false;
+ }
+}
+
+void NuPlayer::RTSPSource::postSourceEOSIfNecessary() {
+ const bool audio = true;
+ const bool video = false;
+ // If a source is near its end, give it some time to retrieve more
+ // data before signaling EOS
+ if (sourceNearEOS(audio) || sourceNearEOS(video)) {
+ if (!mEOSPending) {
+ sp<AMessage> msg = new AMessage(kWhatSignalEOS, this);
+ msg->setInt32("generation", mSeekGeneration);
+ msg->post(kNearEOSTimeoutUs);
+ mEOSPending = true;
+ }
+ }
+}
+
void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
if (msg->what() == kWhatDisconnect) {
sp<AReplyToken> replyID;
@@ -408,6 +472,9 @@
} else if (msg->what() == kWhatPollBuffering) {
onPollBuffering();
return;
+ } else if (msg->what() == kWhatSignalEOS) {
+ onSignalEOS(msg);
+ return;
}
CHECK_EQ(msg->what(), (int)kWhatNotify);
@@ -517,16 +584,10 @@
}
if (err != OK) {
- sp<AnotherPacketSource> source = getSource(false /* audio */);
- if (source != NULL) {
- source->signalEOS(err);
- }
-
- source = getSource(true /* audio */);
- if (source != NULL) {
- source->signalEOS(err);
- }
+ signalSourceEOS(err);
}
+
+ postSourceEOSIfNecessary();
break;
}
@@ -554,6 +615,7 @@
source->queueAccessUnit(accessUnit);
}
+ postSourceEOSIfNecessary();
break;
}
@@ -564,17 +626,7 @@
CHECK_NE(finalResult, (status_t)OK);
if (mTSParser != NULL) {
- sp<AnotherPacketSource> source = getSource(false /* audio */);
- if (source != NULL) {
- source->signalEOS(finalResult);
- }
-
- source = getSource(true /* audio */);
- if (source != NULL) {
- source->signalEOS(finalResult);
- }
-
- return;
+ signalSourceEOS(finalResult);
}
size_t trackIndex;
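The kWhatSignalEOS message carries mSeekGeneration so that an EOS scheduled before a seek is discarded when it finally fires. The same generation-counter idea in isolation (illustrative names, standard C++ only):

#include <cstdio>

struct Source {
    int seekGeneration = 0;
    bool eosPending = false;

    // Post a delayed EOS tagged with the current generation.
    int postEOS() {
        eosPending = true;
        return seekGeneration;      // stored in the message ("generation")
    }

    // Seeking invalidates anything posted earlier.
    void seek() {
        ++seekGeneration;
        eosPending = false;
    }

    // Delivery path, analogous to onSignalEOS().
    void onSignalEOS(int generation) {
        if (generation != seekGeneration || !eosPending) {
            return;                 // stale message from before the seek
        }
        eosPending = false;
        printf("signal EOS to sources\n");
    }
};

int main() {
    Source s;
    int gen = s.postEOS();
    s.seek();                       // user seeks before the timeout fires
    s.onSignalEOS(gen);             // stale: nothing happens
    return 0;
}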
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index a6a7644..c7834ef 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -64,6 +64,7 @@
kWhatDisconnect = 'disc',
kWhatPerformSeek = 'seek',
kWhatPollBuffering = 'poll',
+ kWhatSignalEOS = 'eos ',
};
enum State {
@@ -106,6 +107,7 @@
Mutex mBufferingLock;
bool mBuffering;
bool mInPreparationPhase;
+ bool mEOSPending;
sp<ALooper> mLooper;
sp<MyHandler> mHandler;
@@ -133,7 +135,12 @@
void performSeek(int64_t seekTimeUs);
void schedulePollBuffering();
- void checkBuffering(bool *prepared, bool *underflow, bool *overflow, bool *startServer);
+ void checkBuffering(
+ bool *prepared,
+ bool *underflow,
+ bool *overflow,
+ bool *startServer,
+ bool *finished);
void onPollBuffering();
bool haveSufficientDataOnAllTracks();
@@ -144,6 +151,13 @@
bool stopBufferingIfNecessary();
void finishSeek(status_t err);
+ void postSourceEOSIfNecessary();
+ void signalSourceEOS(status_t result);
+ void onSignalEOS(const sp<AMessage> &msg);
+
+ bool sourceNearEOS(bool audio);
+ bool sourceReachedEOS(bool audio);
+
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e087249..893da89 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -1117,6 +1117,9 @@
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->releaseRecordingFrameHandle(handle);
IPCThreadState::self()->restoreCallingIdentity(token);
+ } else {
+ native_handle_close(handle);
+ native_handle_delete(handle);
}
}
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 99a85f5..7cb568d 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -4110,11 +4110,13 @@
if (!mDataSource->getUInt32(offset, &flags)) {
return ERROR_MALFORMED;
}
- ALOGV("fragment run flags: %08x", flags);
-
- if (flags & 0xff000000) {
- return -EINVAL;
- }
+ // |version| only affects the SampleCompositionTimeOffset field:
+ // if version == 0, SampleCompositionTimeOffset is uint32_t;
+ // otherwise, SampleCompositionTimeOffset is int32_t.
+ // Sample.compositionOffset is defined as int32_t.
+ uint8_t version = flags >> 24;
+ flags &= 0xffffff;
+ ALOGV("fragment run version: 0x%02x, flags: 0x%06x", version, flags);
if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) {
// These two shall not be used together.
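The change stops rejecting a non-zero version byte and instead splits the 32-bit FullBox word into version (top 8 bits) and flags (low 24 bits), since only the interpretation of the composition offset depends on the version. A minimal sketch of that split, with assumed example values:

#include <cstdint>
#include <cstdio>

int main() {
    uint32_t word = 0x01000305;            // example FullBox header word (assumed value)
    uint8_t  version = word >> 24;         // 0x01: composition offsets are signed
    uint32_t flags   = word & 0xffffff;    // 0x000305: which optional fields are present
    printf("version 0x%02x flags 0x%06x\n", (unsigned)version, (unsigned)flags);

    uint32_t rawField = 0xfffffff6u;       // example sample_composition_time_offset bits
    int64_t offset = (version == 0)
            ? (int64_t)rawField             // version 0: unsigned 32-bit
            : (int64_t)(int32_t)rawField;   // version != 0: signed 32-bit (-10 here)
    printf("composition offset %lld\n", (long long)offset);
    return 0;
}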
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 4681abd..f84f484 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -272,6 +272,7 @@
bool mIsHevc;
bool mIsAudio;
bool mIsMPEG4;
+ bool mIsMalformed;
int32_t mTrackId;
int64_t mTrackDurationUs;
int64_t mMaxChunkDurationUs;
@@ -1536,6 +1537,7 @@
mPaused(false),
mResumed(false),
mStarted(false),
+ mIsMalformed(false),
mTrackId(trackId),
mTrackDurationUs(0),
mEstimatedTrackSizeBytes(0),
@@ -2505,13 +2507,15 @@
int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
previousPausedDurationUs += pausedDurationUs - lastDurationUs;
@@ -2521,7 +2525,8 @@
timestampUs -= previousPausedDurationUs;
if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
if (!mIsAudio) {
@@ -2548,7 +2553,8 @@
timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
timestampUs = decodingTimeUs;
@@ -2560,7 +2566,8 @@
(cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL;
if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
if (mStszTableEntries->count() == 0) {
@@ -2602,7 +2609,8 @@
if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
copy->release();
- return ERROR_MALFORMED;
+ mIsMalformed = true;
+ break;
}
ALOGV("%s media time stamp: %" PRId64 " and previous paused duration %" PRId64,
@@ -2624,7 +2632,8 @@
(long long)timestampUs, (long long)lastTimestampUs, trackName);
copy->release();
mSource->stop();
- return UNKNOWN_ERROR;
+ mIsMalformed = true;
+ break;
}
// if the duration is different for this sample, see if it is close enough to the previous
@@ -2780,6 +2789,10 @@
}
bool MPEG4Writer::Track::isTrackMalFormed() const {
+ if (mIsMalformed) {
+ return true;
+ }
+
if (mStszTableEntries->count() == 0) { // no samples written
ALOGE("The number of recorded samples is 0");
return true;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index ff5c4d4..e476424 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -2160,14 +2160,19 @@
CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
dstBuffers->clear();
- const Vector<BufferInfo> &srcBuffers = mPortBuffers[portIndex];
+ // If we're using an input surface (either a non-persistent one created by
+ // createInputSurface(), or a persistent one set by setInputSurface()),
+ // give the client an empty input buffer array.
+ if (portIndex != kPortIndexInput || !mHaveInputSurface) {
+ const Vector<BufferInfo> &srcBuffers = mPortBuffers[portIndex];
- for (size_t i = 0; i < srcBuffers.size(); ++i) {
- const BufferInfo &info = srcBuffers.itemAt(i);
+ for (size_t i = 0; i < srcBuffers.size(); ++i) {
+ const BufferInfo &info = srcBuffers.itemAt(i);
- dstBuffers->push_back(
- (portIndex == kPortIndexInput && mCrypto != NULL)
- ? info.mEncryptedData : info.mData);
+ dstBuffers->push_back(
+ (portIndex == kPortIndexInput && mCrypto != NULL)
+ ? info.mEncryptedData : info.mData);
+ }
}
(new AMessage)->postReply(replyID);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index d2ba02e..be5067d 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -158,11 +158,14 @@
// TODO: Use Flexible color instead
videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
- // For the thumbnail extraction case, try to allocate single buffer
- // in both input and output ports. NOTE: This request may fail if
- // component requires more than that for decoding.
- videoFormat->setInt32("android._num-input-buffers", 1);
- videoFormat->setInt32("android._num-output-buffers", 1);
+ // For the thumbnail extraction case, try to allocate a single buffer in both
+ // the input and output ports, if seeking to a sync frame. NOTE: This request may
+ // fail if the component requires more than that for decoding.
+ bool isSeekingClosest = (seekMode == MediaSource::ReadOptions::SEEK_CLOSEST);
+ if (!isSeekingClosest) {
+ videoFormat->setInt32("android._num-input-buffers", 1);
+ videoFormat->setInt32("android._num-output-buffers", 1);
+ }
status_t err;
sp<ALooper> looper = new ALooper;
@@ -254,6 +257,9 @@
bool isAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
|| !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
+ bool firstSample = true;
+ int64_t targetTimeUs = -1ll;
+
do {
size_t inputIndex = -1;
int64_t ptsUs = 0ll;
@@ -280,6 +286,11 @@
haveMoreInputs = false;
break;
}
+ if (firstSample && isSeekingClosest) {
+ mediaBuffer->meta_data()->findInt64(kKeyTargetTime, &targetTimeUs);
+ ALOGV("Seeking closest: targetTimeUs=%lld", (long long)targetTimeUs);
+ }
+ firstSample = false;
if (mediaBuffer->range_length() > codecBuffer->capacity()) {
ALOGE("buffer size (%zu) too large for codec input size (%zu)",
@@ -292,8 +303,9 @@
memcpy(codecBuffer->data(),
(const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
mediaBuffer->range_length());
- if (isAvcOrHevc && IsIDR(codecBuffer)) {
- // Only need to decode one IDR frame.
+ if (isAvcOrHevc && IsIDR(codecBuffer) && !isSeekingClosest) {
+ // Only need to decode one IDR frame, unless we're seeking with CLOSEST
+ // option, in which case we need to actually decode to targetTimeUs.
haveMoreInputs = false;
flags |= MediaCodec::BUFFER_FLAG_EOS;
}
@@ -340,8 +352,13 @@
ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
err = OK;
} else if (err == OK) {
- ALOGV("Received an output buffer");
- done = true;
+ // If we're seeking with the CLOSEST option and obtained a valid targetTimeUs
+ // from the extractor, decode to the specified frame. Otherwise we're done.
+ done = (targetTimeUs < 0ll) || (timeUs >= targetTimeUs);
+ ALOGV("Received an output buffer, timeUs=%lld", (long long)timeUs);
+ if (!done) {
+ err = decoder->releaseOutputBuffer(index);
+ }
} else {
ALOGW("Received error %d (%s) instead of output", err, asString(err));
done = true;
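With SEEK_CLOSEST the extractor returns the preceding sync frame plus a target time, and the loop keeps discarding decoded output until it reaches that target. The control flow, reduced to plain C++ over a hypothetical list of decoded timestamps (not the MediaCodec loop itself):

#include <cstdio>
#include <vector>

int main() {
    // Decoded output timestamps starting at the sync frame before the target.
    std::vector<long long> outputTimesUs = {0, 33366, 66733, 100100, 133466};
    long long targetTimeUs = 100100;     // from kKeyTargetTime; -1 if not seeking closest

    for (long long timeUs : outputTimesUs) {
        bool done = (targetTimeUs < 0) || (timeUs >= targetTimeUs);
        if (!done) {
            printf("release %lld, keep decoding\n", timeUs);   // releaseOutputBuffer()
            continue;
        }
        printf("use frame at %lld as the thumbnail\n", timeUs);
        break;
    }
    return 0;
}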
diff --git a/media/libstagefright/foundation/MediaBuffer.cpp b/media/libstagefright/foundation/MediaBuffer.cpp
index 718b7e5..16000ef 100644
--- a/media/libstagefright/foundation/MediaBuffer.cpp
+++ b/media/libstagefright/foundation/MediaBuffer.cpp
@@ -105,14 +105,7 @@
void MediaBuffer::release() {
if (mObserver == NULL) {
- if (mMemory.get() != nullptr) {
- // See if there is a pending release and there are no observers.
- // Ideally this never happens.
- while (addPendingRelease(-1) > 0) {
- __sync_fetch_and_sub(&mRefCount, 1);
- }
- addPendingRelease(1);
- }
+ // Legacy contract for MediaBuffer without a MediaBufferGroup.
CHECK_EQ(mRefCount, 0);
delete this;
return;
@@ -205,10 +198,6 @@
mObserver = observer;
}
-int MediaBuffer::refcount() const {
- return mRefCount;
-}
-
MediaBuffer *MediaBuffer::clone() {
CHECK(mGraphicBuffer == NULL);
diff --git a/media/libstagefright/foundation/MediaBufferGroup.cpp b/media/libstagefright/foundation/MediaBufferGroup.cpp
index cb78879..54f768a 100644
--- a/media/libstagefright/foundation/MediaBufferGroup.cpp
+++ b/media/libstagefright/foundation/MediaBufferGroup.cpp
@@ -51,7 +51,7 @@
for (size_t i = 0; i < buffers; ++i) {
sp<IMemory> mem = memoryDealer->allocate(augmented_size);
- if (mem.get() == nullptr) {
+ if (mem.get() == nullptr || mem->pointer() == nullptr) {
ALOGW("Only allocated %zu shared buffers of size %zu", i, buffer_size);
break;
}
@@ -76,11 +76,24 @@
MediaBufferGroup::~MediaBufferGroup() {
for (MediaBuffer *buffer : mBuffers) {
- buffer->resolvePendingRelease();
- // If we don't release it, perhaps noone will release it.
- LOG_ALWAYS_FATAL_IF(buffer->refcount() != 0,
- "buffer refcount %p = %d != 0", buffer, buffer->refcount());
- // actually delete it.
+ if (buffer->refcount() != 0) {
+ const int localRefcount = buffer->localRefcount();
+ const int remoteRefcount = buffer->remoteRefcount();
+
+ // Fatal if we have a local refcount.
+ LOG_ALWAYS_FATAL_IF(localRefcount != 0,
+ "buffer(%p) localRefcount %d != 0, remoteRefcount %d",
+ buffer, localRefcount, remoteRefcount);
+
+ // Log an error if we have a remaining remote refcount,
+ // as the remote process may have died or may be misbehaving.
+ // The shared memory associated with the MediaBuffer will
+ // automatically be reclaimed when there are no remaining fds
+ // associated with it.
+ ALOGE("buffer(%p) has residual remoteRefcount %d",
+ buffer, remoteRefcount);
+ }
+ // gracefully delete.
buffer->setObserver(nullptr);
buffer->release();
}
@@ -94,32 +107,11 @@
// optionally: mGrowthLimit = max(mGrowthLimit, mBuffers.size());
}
-void MediaBufferGroup::gc(size_t freeBuffers) {
- Mutex::Autolock autoLock(mLock);
-
- size_t freeCount = 0;
- for (auto it = mBuffers.begin(); it != mBuffers.end(); ) {
- (*it)->resolvePendingRelease();
- if ((*it)->isDeadObject()) {
- // The MediaBuffer has been deleted, why is it in the MediaBufferGroup?
- LOG_ALWAYS_FATAL("buffer(%p) has dead object with refcount %d",
- (*it), (*it)->refcount());
- } else if ((*it)->refcount() == 0 && ++freeCount > freeBuffers) {
- (*it)->setObserver(nullptr);
- (*it)->release();
- it = mBuffers.erase(it);
- } else {
- ++it;
- }
- }
-}
-
bool MediaBufferGroup::has_buffers() {
if (mBuffers.size() < mGrowthLimit) {
return true; // We can add more buffers internally.
}
for (MediaBuffer *buffer : mBuffers) {
- buffer->resolvePendingRelease();
if (buffer->refcount() == 0) {
return true;
}
@@ -135,7 +127,6 @@
MediaBuffer *buffer = nullptr;
auto free = mBuffers.end();
for (auto it = mBuffers.begin(); it != mBuffers.end(); ++it) {
- (*it)->resolvePendingRelease();
if ((*it)->refcount() == 0) {
const size_t size = (*it)->size();
if (size >= requestedSize) {
diff --git a/media/libstagefright/foundation/MetaData.cpp b/media/libstagefright/foundation/MetaData.cpp
index b4abc60..a8965f0 100644
--- a/media/libstagefright/foundation/MetaData.cpp
+++ b/media/libstagefright/foundation/MetaData.cpp
@@ -392,8 +392,12 @@
}
status_t MetaData::writeToParcel(Parcel &parcel) {
+ status_t ret;
size_t numItems = mItems.size();
- parcel.writeUint32(uint32_t(numItems));
+ ret = parcel.writeUint32(uint32_t(numItems));
+ if (ret) {
+ return ret;
+ }
for (size_t i = 0; i < numItems; i++) {
int32_t key = mItems.keyAt(i);
const typed_data &item = mItems.valueAt(i);
@@ -401,9 +405,32 @@
const void *data;
size_t size;
item.getData(&type, &data, &size);
- parcel.writeInt32(key);
- parcel.writeUint32(type);
- parcel.writeByteArray(size, (uint8_t*)data);
+ ret = parcel.writeInt32(key);
+ if (ret) {
+ return ret;
+ }
+ ret = parcel.writeUint32(type);
+ if (ret) {
+ return ret;
+ }
+ if (type == TYPE_NONE) {
+ android::Parcel::WritableBlob blob;
+ ret = parcel.writeUint32(static_cast<uint32_t>(size));
+ if (ret) {
+ return ret;
+ }
+ ret = parcel.writeBlob(size, false, &blob);
+ if (ret) {
+ return ret;
+ }
+ memcpy(blob.data(), data, size);
+ blob.release();
+ } else {
+ ret = parcel.writeByteArray(size, (uint8_t*)data);
+ if (ret) {
+ return ret;
+ }
+ }
}
return OK;
}
@@ -422,8 +449,20 @@
if (ret != OK) {
break;
}
- // copy data directly from Parcel storage, then advance position
- setData(key, type, parcel.readInplace(size), size);
+ // copy data from Blob, which may be inline in Parcel storage,
+ // then advance position
+ if (type == TYPE_NONE) {
+ android::Parcel::ReadableBlob blob;
+ ret = parcel.readBlob(size, &blob);
+ if (ret != OK) {
+ break;
+ }
+ setData(key, type, blob.data(), size);
+ blob.release();
+ } else {
+ // copy data directly from Parcel storage, then advance position
+ setData(key, type, parcel.readInplace(size), size);
+ }
}
return OK;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5ae5ded..35d5de6 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -432,7 +432,7 @@
|| (index > (OMX_INDEXTYPE)OMX_IndexExtAudioStartUnused
&& index <= (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported)
|| (index > (OMX_INDEXTYPE)OMX_IndexExtVideoStartUnused
- && index <= (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh)
+ && index <= (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering)
|| (index > (OMX_INDEXTYPE)OMX_IndexExtOtherStartUnused
&& index <= (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits)) {
return false;
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 42a1182..76e2e6e 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -1408,6 +1408,11 @@
msg->post((mKeepAliveTimeoutUs * 9) / 10);
}
+ void cancelAccessUnitTimeoutCheck() {
+ ALOGV("cancelAccessUnitTimeoutCheck");
+ ++mCheckGeneration;
+ }
+
void postAccessUnitTimeoutCheck() {
if (mCheckPending) {
return;
@@ -1792,14 +1797,8 @@
// Time is now established, lets start timestamping immediately
for (size_t i = 0; i < mTracks.size(); ++i) {
- TrackInfo *trackInfo = &mTracks.editItemAt(i);
- while (!trackInfo->mPackets.empty()) {
- sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
- trackInfo->mPackets.erase(trackInfo->mPackets.begin());
-
- if (addMediaTimestamp(i, trackInfo, accessUnit)) {
- postQueueAccessUnit(i, accessUnit);
- }
+ if (OK != processAccessUnitQueue(i)) {
+ return;
}
}
for (size_t i = 0; i < mTracks.size(); ++i) {
@@ -1812,26 +1811,8 @@
}
}
- void onAccessUnitComplete(
- int32_t trackIndex, const sp<ABuffer> &accessUnit) {
- ALOGV("onAccessUnitComplete track %d", trackIndex);
-
+ status_t processAccessUnitQueue(int32_t trackIndex) {
TrackInfo *track = &mTracks.editItemAt(trackIndex);
- if(!mPlayResponseParsed){
- uint32_t seqNum = (uint32_t)accessUnit->int32Data();
- ALOGI("play response is not parsed, storing accessunit %u", seqNum);
- track->mPackets.push_back(accessUnit);
- return;
- }
-
- handleFirstAccessUnit();
-
- if (!mAllTracksHaveTime) {
- ALOGV("storing accessUnit, no time established yet");
- track->mPackets.push_back(accessUnit);
- return;
- }
-
while (!track->mPackets.empty()) {
sp<ABuffer> accessUnit = *track->mPackets.begin();
track->mPackets.erase(track->mPackets.begin());
@@ -1842,27 +1823,29 @@
// by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
// RTSP "PLAY" command should be used to detect the first RTP packet
// after seeking.
- if (track->mAllowedStaleAccessUnits > 0) {
- uint32_t seqNum16 = seqNum & 0xffff;
- uint32_t firstSeqNumInSegment16 = track->mFirstSeqNumInSegment & 0xffff;
- if (seqNum16 > firstSeqNumInSegment16 + kMaxAllowedStaleAccessUnits
- || seqNum16 < firstSeqNumInSegment16) {
- // Not the first rtp packet of the stream after seeking, discarding.
- track->mAllowedStaleAccessUnits--;
- ALOGV("discarding stale access unit (0x%x : 0x%x)",
- seqNum, track->mFirstSeqNumInSegment);
- continue;
+ if (mSeekable) {
+ if (track->mAllowedStaleAccessUnits > 0) {
+ uint32_t seqNum16 = seqNum & 0xffff;
+ uint32_t firstSeqNumInSegment16 = track->mFirstSeqNumInSegment & 0xffff;
+ if (seqNum16 > firstSeqNumInSegment16 + kMaxAllowedStaleAccessUnits
+ || seqNum16 < firstSeqNumInSegment16) {
+ // Not the first rtp packet of the stream after seeking, discarding.
+ track->mAllowedStaleAccessUnits--;
+ ALOGV("discarding stale access unit (0x%x : 0x%x)",
+ seqNum, track->mFirstSeqNumInSegment);
+ continue;
+ }
+ ALOGW_IF(seqNum16 != firstSeqNumInSegment16,
+ "Missing the first packet(%u), now take packet(%u) as first one",
+ track->mFirstSeqNumInSegment, seqNum);
+ } else { // track->mAllowedStaleAccessUnits <= 0
+ mNumAccessUnitsReceived = 0;
+ ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
+ "Still no first rtp packet after %d stale ones",
+ kMaxAllowedStaleAccessUnits);
+ track->mAllowedStaleAccessUnits = -1;
+ return UNKNOWN_ERROR;
}
- ALOGW_IF(seqNum16 != firstSeqNumInSegment16,
- "Missing the first packet(%u), now take packet(%u) as first one",
- track->mFirstSeqNumInSegment, seqNum);
- } else { // track->mAllowedStaleAccessUnits <= 0
- mNumAccessUnitsReceived = 0;
- ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
- "Still no first rtp packet after %d stale ones",
- kMaxAllowedStaleAccessUnits);
- track->mAllowedStaleAccessUnits = -1;
- return;
}
// Now found the first rtp packet of the stream after seeking.
@@ -1876,14 +1859,35 @@
continue;
}
-
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
postQueueAccessUnit(trackIndex, accessUnit);
}
}
+ return OK;
+ }
- if (addMediaTimestamp(trackIndex, track, accessUnit)) {
- postQueueAccessUnit(trackIndex, accessUnit);
+ void onAccessUnitComplete(
+ int32_t trackIndex, const sp<ABuffer> &accessUnit) {
+ TrackInfo *track = &mTracks.editItemAt(trackIndex);
+ track->mPackets.push_back(accessUnit);
+
+ uint32_t seqNum = (uint32_t)accessUnit->int32Data();
+ ALOGV("onAccessUnitComplete track %d storing accessunit %u", trackIndex, seqNum);
+
+ if (!mPlayResponseParsed) {
+ ALOGV("play response is not parsed");
+ return;
+ }
+
+ handleFirstAccessUnit();
+
+ if (!mAllTracksHaveTime) {
+ ALOGV("storing accessUnit, no time established yet");
+ return;
+ }
+
+ if (OK != processAccessUnitQueue(trackIndex)) {
+ return;
}
if (track->mEOSReceived) {
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index 1118959..face727 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -343,9 +343,9 @@
const void *key;
size_t keysize;
- if (meta->findData(kKeyCryptoIV, &type, &key, &keysize)) {
+ if (meta->findData(kKeyCryptoKey, &type, &key, &keysize)) {
if (keysize != 16) {
- // IVs must be 16 bytes in length.
+ // Keys must be 16 bytes in length.
return NULL;
}
}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 79f4a66..1785a03 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1104,14 +1104,20 @@
// AUDIO_IO_HANDLE_NONE means the parameters are global to the audio hardware interface
if (ioHandle == AUDIO_IO_HANDLE_NONE) {
Mutex::Autolock _l(mLock);
- status_t final_result = NO_ERROR;
+ // result will remain NO_INIT if no audio device is present
+ status_t final_result = NO_INIT;
{
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_PARAMETER;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
status_t result = dev->set_parameters(dev, keyValuePairs.string());
- final_result = result ?: final_result;
+ // Return success if at least one audio device accepts the parameters, as not all
+ // HALs are required to support all parameters. If no audio device supports the
+ // requested parameters, the last error is reported.
+ if (final_result != NO_ERROR) {
+ final_result = result;
+ }
}
mHardwareStatus = AUDIO_HW_IDLE;
}
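The aggregation rule is "success if at least one device accepted the parameters, otherwise the last error, and NO_INIT when there is no device at all". That folds down to a tiny helper; the sketch below uses approximate status values and a plain vector, not the AudioFlinger types:

#include <cstdio>
#include <vector>

enum { NO_ERROR = 0, NO_INIT = -19, BAD_VALUE = -22 };

// Returns NO_ERROR if any device accepted, the last error otherwise,
// and NO_INIT when the list is empty (no audio device present).
static int aggregateSetParameters(const std::vector<int>& results) {
    int finalResult = NO_INIT;
    for (int result : results) {
        if (finalResult != NO_ERROR) {
            finalResult = result;    // first success sticks; otherwise track the latest error
        }
    }
    return finalResult;
}

int main() {
    printf("%d\n", aggregateSetParameters({}));                       // NO_INIT
    printf("%d\n", aggregateSetParameters({BAD_VALUE, NO_ERROR}));    // NO_ERROR
    printf("%d\n", aggregateSetParameters({BAD_VALUE, BAD_VALUE}));   // BAD_VALUE (last error)
    return 0;
}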
diff --git a/services/audioflinger/BufferProviders.cpp b/services/audioflinger/BufferProviders.cpp
index 2ca2cac..7b6dfcb 100644
--- a/services/audioflinger/BufferProviders.cpp
+++ b/services/audioflinger/BufferProviders.cpp
@@ -474,18 +474,18 @@
ALOGV("processFrames(%zu %zu) remaining(%zu)", *dstFrames, *srcFrames, mRemaining);
// Note dstFrames is the required number of frames.
- // Ensure consumption from src is as expected.
- //TODO: add logic to track "very accurate" consumption related to speed, original sampling
- //rate, actual frames processed.
- const size_t targetSrc = *dstFrames * mPlaybackRate.mSpeed;
- if (*srcFrames < targetSrc) { // limit dst frames to that possible
- *dstFrames = *srcFrames / mPlaybackRate.mSpeed;
- } else if (*srcFrames > targetSrc + 1) {
- *srcFrames = targetSrc + 1;
- }
-
if (!mAudioPlaybackRateValid) {
//fallback mode
+ // Ensure consumption from src is as expected.
+ // TODO: add logic to track "very accurate" consumption related to speed, original sampling
+ // rate, actual frames processed.
+
+ const size_t targetSrc = *dstFrames * mPlaybackRate.mSpeed;
+ if (*srcFrames < targetSrc) { // limit dst frames to that possible
+ *dstFrames = *srcFrames / mPlaybackRate.mSpeed;
+ } else if (*srcFrames > targetSrc + 1) {
+ *srcFrames = targetSrc + 1;
+ }
if (*dstFrames > 0) {
switch(mPlaybackRate.mFallbackMode) {
case AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT:
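In fallback mode the provider still keeps source consumption consistent with the requested speed: the output is limited by what the source can supply, and the input is capped near dstFrames * speed. A minimal sketch of that clamping arithmetic (illustrative only):

#include <cstdio>
#include <cstddef>

// Mirror of the fallback clamping above: limit dst by available src,
// or cap src at the expected consumption for the requested speed.
static void clampFrames(size_t* dstFrames, size_t* srcFrames, float speed) {
    const size_t targetSrc = (size_t)(*dstFrames * speed);
    if (*srcFrames < targetSrc) {
        *dstFrames = (size_t)(*srcFrames / speed);   // not enough input: shrink output
    } else if (*srcFrames > targetSrc + 1) {
        *srcFrames = targetSrc + 1;                  // plenty of input: consume only what speed implies
    }
}

int main() {
    size_t dst = 1024, src = 4096;
    clampFrames(&dst, &src, 2.0f);
    printf("dst=%zu src=%zu\n", dst, src);           // dst=1024 src=2049
    return 0;
}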
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index a671128..6aedd29 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3106,9 +3106,9 @@
if (!keepWakeLock()) {
releaseWakeLock_l();
released = true;
+ mWakeLockUids.clear();
+ mActiveTracksGeneration++;
}
- mWakeLockUids.clear();
- mActiveTracksGeneration++;
ALOGV("wait async completion");
mWaitWorkCV.wait(mLock);
ALOGV("async completion/wake");
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index b752541..1ddfb4d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1321,9 +1321,11 @@
desc->isActive() &&
outputDesc->sharesHwModuleWith(desc) &&
(newDevice != desc->device())) {
+ audio_devices_t newDevice2 = getNewOutputDevice(desc, false /*fromCache*/);
+ bool force = desc->device() != newDevice2;
setOutputDevice(desc,
- getNewOutputDevice(desc, false /*fromCache*/),
- true,
+ newDevice2,
+ force,
outputDesc->latency()*2);
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index aeab451..48a2a99 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -170,6 +170,9 @@
return res;
}
+ /** Register the in-flight map with the status tracker */
+ mInFlightStatusId = mStatusTracker->addComponent();
+
/** Create buffer manager */
mBufferManager = new Camera3BufferManager();
@@ -2196,6 +2199,10 @@
aeTriggerCancelOverride));
if (res < 0) return res;
+ if (mInFlightMap.size() == 1) {
+ mStatusTracker->markComponentActive(mInFlightStatusId);
+ }
+
return OK;
}
@@ -2252,6 +2259,11 @@
mInFlightMap.removeItemsAt(idx, 1);
+ // Indicate to the status tracker that the in-flight map is now idle
+ if (mInFlightMap.size() == 0) {
+ mStatusTracker->markComponentIdle(mInFlightStatusId, Fence::NO_FENCE);
+ }
+
ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
}
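The status tracker is told the in-flight map is busy when the first request is registered and idle when the last one is removed. That edge-triggered pattern can be shown with a plain map (names are illustrative, not the camera service types):

#include <cstdio>
#include <map>

struct StatusTracker {
    void markActive(int id) { printf("component %d active\n", id); }
    void markIdle(int id)   { printf("component %d idle\n", id); }
};

int main() {
    StatusTracker tracker;
    const int inFlightStatusId = 1;          // as returned by addComponent()
    std::map<unsigned, int> inFlight;        // frameNumber -> (stand-in for the request record)

    auto registerInFlight = [&](unsigned frame) {
        inFlight[frame] = 0;
        if (inFlight.size() == 1) {          // 0 -> 1 transition: now busy
            tracker.markActive(inFlightStatusId);
        }
    };
    auto removeInFlight = [&](unsigned frame) {
        inFlight.erase(frame);
        if (inFlight.empty()) {              // last entry gone: now idle
            tracker.markIdle(inFlightStatusId);
        }
    };

    registerInFlight(100);
    registerInFlight(101);
    removeInFlight(100);
    removeInFlight(101);                     // prints "component 1 idle"
    return 0;
}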
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 3244258..17893a9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -725,6 +725,7 @@
Mutex mInFlightLock; // Protects mInFlightMap
InFlightMap mInFlightMap;
+ int mInFlightStatusId;
status_t registerInFlight(uint32_t frameNumber,
int32_t numBuffers, CaptureResultExtras resultExtras, bool hasInput,