Merge "Workaround for track recreation bug" into jb-mr1-dev
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index 840b4aa..9e07ea4 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -167,6 +167,8 @@
// this list in signalBufferReturned
Vector<sp<GraphicBuffer> > mCurrentBuffers;
+ size_t mNumPendingBuffers;
+
// mCurrentTimestamp is the timestamp for the current texture. It
// gets set to mLastQueuedTimestamp each time updateTexImage is called.
int64_t mCurrentTimestamp;
@@ -202,10 +204,14 @@
// offset timestamps.
int64_t mStartTimeNs;
+ size_t mMaxAcquiredBufferCount;
+
// mFrameAvailableCondition condition used to indicate whether there
// is a frame available for dequeuing
Condition mFrameAvailableCondition;
+ Condition mMediaBuffersAvailableCondition;
+
// Avoid copying and equating and default constructor
DISALLOW_IMPLICIT_CONSTRUCTORS(SurfaceMediaSource);
};
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 867f76d..e224437 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -18,8 +18,8 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
-#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
#include <OMX_IVCommon.h>
#include <MetadataBufferType.h>
@@ -39,12 +39,14 @@
mWidth(bufferWidth),
mHeight(bufferHeight),
mCurrentSlot(BufferQueue::INVALID_BUFFER_SLOT),
+ mNumPendingBuffers(0),
mCurrentTimestamp(0),
mFrameRate(30),
mStopped(false),
mNumFramesReceived(0),
mNumFramesEncoded(0),
- mFirstFrameTimestamp(0)
+ mFirstFrameTimestamp(0),
+ mMaxAcquiredBufferCount(4) // XXX double-check the default
{
ALOGV("SurfaceMediaSource");
@@ -155,20 +157,32 @@
ALOGE("bufferCount %d is too small", bufferCount);
return BAD_VALUE;
}
+
+ mMaxAcquiredBufferCount = bufferCount;
}
- if (bufferCount != 0) {
- status_t err = mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
- if (err != OK) {
- return err;
- }
+ CHECK_GT(mMaxAcquiredBufferCount, 1);
+
+ status_t err =
+ mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
+
+ if (err != OK) {
+ return err;
}
+ mNumPendingBuffers = 0;
+
return OK;
}
status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
- return mBufferQueue->setMaxAcquiredBufferCount(count);
+ ALOGV("setMaxAcquiredBufferCount(%d)", count);
+ Mutex::Autolock lock(mMutex);
+
+ CHECK_GT(count, 1);
+ mMaxAcquiredBufferCount = count;
+
+ return OK;
}
@@ -216,9 +230,8 @@
// Note: Call only when you have the lock
static void passMetadataBuffer(MediaBuffer **buffer,
buffer_handle_t bufferHandle) {
- // MediaBuffer allocates and owns this data
- MediaBuffer *tempBuffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
- char *data = (char *)tempBuffer->data();
+ *buffer = new MediaBuffer(4 + sizeof(buffer_handle_t));
+ char *data = (char *)(*buffer)->data();
if (data == NULL) {
ALOGE("Cannot allocate memory for metadata buffer!");
return;
@@ -226,7 +239,6 @@
OMX_U32 type = kMetadataBufferTypeGrallocSource;
memcpy(data, &type, 4);
memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));
- *buffer = tempBuffer;
ALOGV("handle = %p, , offset = %d, length = %d",
bufferHandle, (*buffer)->range_length(), (*buffer)->range_offset());
@@ -240,6 +252,10 @@
*buffer = NULL;
+ while (!mStopped && mNumPendingBuffers == mMaxAcquiredBufferCount) {
+ mMediaBuffersAvailableCondition.wait(mMutex);
+ }
+
// Update the current buffer info
// TODO: mCurrentSlot can be made a bufferstate since there
// can be more than one "current" slots.
@@ -306,6 +322,7 @@
mNumFramesEncoded++;
// Pass the data to the MediaBuffer. Pass in only the metadata
+
passMetadataBuffer(buffer, mBufferSlot[mCurrentSlot]->handle);
(*buffer)->setObserver(this);
@@ -315,6 +332,7 @@
mNumFramesEncoded, mCurrentTimestamp / 1000,
mCurrentTimestamp / 1000 - prevTimeStamp / 1000);
+ ++mNumPendingBuffers;
return OK;
}
@@ -371,6 +389,9 @@
if (!foundBuffer) {
CHECK(!"signalBufferReturned: bogus buffer");
}
+
+ --mNumPendingBuffers;
+ mMediaBuffersAvailableCondition.broadcast();
}
// Part of the BufferQueue::ConsumerListener
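For reference, the SurfaceMediaSource hunks above implement a simple backpressure scheme: read() now blocks while mNumPendingBuffers has reached mMaxAcquiredBufferCount, and signalBufferReturned() decrements the counter and broadcasts mMediaBuffersAvailableCondition so a blocked reader can continue. Below is a minimal standalone sketch of that pattern; std::mutex/std::condition_variable stand in for the Android Mutex/Condition classes, and the BoundedSource type and its main() driver are illustrative only, not AOSP code.

// Minimal sketch of the bounded-pending-buffers pattern introduced above.
// std::mutex/std::condition_variable stand in for android::Mutex/Condition;
// BoundedSource and its method names are illustrative, not AOSP API.
#include <condition_variable>
#include <cstdio>
#include <mutex>

class BoundedSource {
public:
    explicit BoundedSource(size_t maxAcquired)
        : mMaxAcquiredBufferCount(maxAcquired),
          mNumPendingBuffers(0),
          mStopped(false) {}

    // Mirrors SurfaceMediaSource::read(): block while every acquirable
    // buffer is still owned by the downstream encoder.
    bool read(int *outFrame, int frameValue) {
        std::unique_lock<std::mutex> lock(mMutex);
        mMediaBuffersAvailable.wait(lock, [this] {
            return mStopped || mNumPendingBuffers < mMaxAcquiredBufferCount;
        });
        if (mStopped) return false;
        *outFrame = frameValue;
        ++mNumPendingBuffers;
        return true;
    }

    // Mirrors signalBufferReturned(): one buffer came back, wake readers.
    void signalBufferReturned() {
        std::lock_guard<std::mutex> lock(mMutex);
        --mNumPendingBuffers;
        mMediaBuffersAvailable.notify_all();
    }

    void stop() {
        std::lock_guard<std::mutex> lock(mMutex);
        mStopped = true;
        mMediaBuffersAvailable.notify_all();
    }

private:
    std::mutex mMutex;
    std::condition_variable mMediaBuffersAvailable;
    const size_t mMaxAcquiredBufferCount;
    size_t mNumPendingBuffers;
    bool mStopped;
};

int main() {
    BoundedSource source(4);  // matches the default of 4 in the hunk above
    int frame = 0;
    // Fill the pipeline: the first four reads succeed immediately.
    for (int i = 0; i < 4; ++i) source.read(&frame, i);
    // Returning one buffer makes room for exactly one more read.
    source.signalBufferReturned();
    source.read(&frame, 4);
    printf("pending buffers back at the cap, last frame read: %d\n", frame);
    source.stop();
    return 0;
}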
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 0b29df9..c4845e3 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -20,6 +20,7 @@
#include "Converter.h"
+#include <cutils/properties.h>
#include <gui/SurfaceTextureClient.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -48,6 +49,10 @@
mEncoder->release();
mEncoder.clear();
}
+
+ AString mime;
+ CHECK(mInputFormat->findString("mime", &mime));
+ ALOGI("encoder (%s) shut down.", mime.c_str());
}
status_t Converter::initCheck() const {
@@ -62,6 +67,20 @@
return mOutputFormat;
}
+static int32_t getBitrate(const char *propName, int32_t defaultValue) {
+ char val[PROPERTY_VALUE_MAX];
+ if (property_get(propName, val, NULL)) {
+ char *end;
+ unsigned long x = strtoul(val, &end, 10);
+
+ if (*end == '\0' && end > val && x > 0) {
+ return x;
+ }
+ }
+
+ return defaultValue;
+}
+
status_t Converter::initEncoder() {
AString inputMIME;
CHECK(mInputFormat->findString("mime", &inputMIME));
@@ -87,11 +106,17 @@
mOutputFormat = mInputFormat->dup();
mOutputFormat->setString("mime", outputMIME.c_str());
+ int32_t audioBitrate = getBitrate("media.wfd.audio-bitrate", 64000);
+ int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 10000000);
+
+ ALOGI("using audio bitrate of %d bps, video bitrate of %d bps",
+ audioBitrate, videoBitrate);
+
if (isAudio) {
- mOutputFormat->setInt32("bitrate", 64000); // 64 kBit/sec
+ mOutputFormat->setInt32("bitrate", audioBitrate);
} else {
- mOutputFormat->setInt32("bitrate", 10000000); // 10Mbit/sec
- mOutputFormat->setInt32("frame-rate", 60);
+ mOutputFormat->setInt32("bitrate", videoBitrate);
+ mOutputFormat->setInt32("frame-rate", 30);
mOutputFormat->setInt32("i-frame-interval", 3); // Iframes every 3 secs
}
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp
index 786029a..35ae539 100644
--- a/media/libstagefright/wifi-display/source/MediaPuller.cpp
+++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp
@@ -33,7 +33,13 @@
const sp<MediaSource> &source, const sp<AMessage> &notify)
: mSource(source),
mNotify(notify),
- mPullGeneration(0) {
+ mPullGeneration(0),
+ mIsAudio(false) {
+ sp<MetaData> meta = source->getFormat();
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ mIsAudio = !strncasecmp(mime, "audio/", 6);
}
MediaPuller::~MediaPuller() {
@@ -77,7 +83,14 @@
schedulePull();
}
} else {
+ sp<MetaData> meta = mSource->getFormat();
+ const char *tmp;
+ CHECK(meta->findCString(kKeyMIMEType, &tmp));
+ AString mime = tmp;
+
+ ALOGI("MediaPuller(%s) stopping.", mime.c_str());
err = mSource->stop();
+ ALOGI("MediaPuller(%s) stopped.", mime.c_str());
++mPullGeneration;
}
@@ -124,7 +137,15 @@
mbuf->range_length());
accessUnit->meta()->setInt64("timeUs", timeUs);
- accessUnit->meta()->setPointer("mediaBuffer", mbuf);
+
+ if (mIsAudio) {
+ mbuf->release();
+ mbuf = NULL;
+ } else {
+ // video encoder will release MediaBuffer when done
+ // with underlying data.
+ accessUnit->meta()->setPointer("mediaBuffer", mbuf);
+ }
sp<AMessage> notify = mNotify->dup();
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.h b/media/libstagefright/wifi-display/source/MediaPuller.h
index 5297501..134e1c0 100644
--- a/media/libstagefright/wifi-display/source/MediaPuller.h
+++ b/media/libstagefright/wifi-display/source/MediaPuller.h
@@ -49,6 +49,7 @@
sp<MediaSource> mSource;
sp<AMessage> mNotify;
int32_t mPullGeneration;
+ bool mIsAudio;
status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
void schedulePull();
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 5d0ddf1..c38a300 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -159,6 +159,8 @@
err = mMediaPuller->stop();
}
+ mConverter.clear();
+
mStarted = false;
return err;
@@ -191,7 +193,8 @@
mNumSRsSent(0),
mSendSRPending(false),
mFirstPacketTimeUs(-1ll),
- mHistoryLength(0)
+ mHistoryLength(0),
+ mTotalBytesSent(0ll)
#if LOG_TRANSPORT_STREAM
,mLogFile(NULL)
#endif
@@ -288,41 +291,6 @@
mLogFile = NULL;
}
#endif
-
- mTracks.clear();
-
- mPacketizer.clear();
-
- if (mSerializer != NULL) {
- mSerializer->stop();
-
- looper()->unregisterHandler(mSerializer->id());
- mSerializer.clear();
- }
-
- mTracks.clear();
-
- if (mSerializerLooper != NULL) {
- mSerializerLooper->stop();
- mSerializerLooper.clear();
- }
-
- if (mLegacyMode) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->getService(String16("SurfaceFlinger"));
- sp<ISurfaceComposer> service = interface_cast<ISurfaceComposer>(binder);
- CHECK(service != NULL);
-
- service->connectDisplay(NULL);
- }
-
- if (mRTCPSessionID != 0) {
- mNetSession->destroySession(mRTCPSessionID);
- }
-
- if (mRTPSessionID != 0) {
- mNetSession->destroySession(mRTPSessionID);
- }
}
int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
@@ -369,6 +337,45 @@
return OK;
}
+status_t WifiDisplaySource::PlaybackSession::destroy() {
+ mTracks.clear();
+
+ mPacketizer.clear();
+
+ if (mSerializer != NULL) {
+ mSerializer->stop();
+
+ looper()->unregisterHandler(mSerializer->id());
+ mSerializer.clear();
+ }
+
+ mTracks.clear();
+
+ if (mSerializerLooper != NULL) {
+ mSerializerLooper->stop();
+ mSerializerLooper.clear();
+ }
+
+ if (mLegacyMode) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("SurfaceFlinger"));
+ sp<ISurfaceComposer> service = interface_cast<ISurfaceComposer>(binder);
+ CHECK(service != NULL);
+
+ service->connectDisplay(NULL);
+ }
+
+ if (mRTCPSessionID != 0) {
+ mNetSession->destroySession(mRTCPSessionID);
+ }
+
+ if (mRTPSessionID != 0) {
+ mNetSession->destroySession(mRTPSessionID);
+ }
+
+ return OK;
+}
+
void WifiDisplaySource::PlaybackSession::onMessageReceived(
const sp<AMessage> &msg) {
switch (msg->what()) {
@@ -785,8 +792,7 @@
}
// Add one reference to account for the serializer.
- // Add another two for unknown reasons.
- err = source->setMaxAcquiredBufferCount(15); // XXX numInputBuffers + 2);
+ err = source->setMaxAcquiredBufferCount(numInputBuffers);
CHECK_EQ(err, (status_t)OK);
mBufferQueue = source->getBufferQueue();
@@ -1018,6 +1024,14 @@
} else {
mNetSession->sendRequest(
mRTPSessionID, rtp, mTSQueue->size());
+
+ mTotalBytesSent += mTSQueue->size();
+ int64_t delayUs = ALooper::GetNowUs() - mFirstPacketTimeUs;
+
+ if (delayUs > 0ll) {
+ ALOGV("approx. net bandwidth used: %.2f Mbit/sec",
+ mTotalBytesSent * 8.0 / delayUs);
+ }
}
mTSQueue->setInt32Data(mRTPSeqNo - 1);
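A note on the bandwidth log added above: mTotalBytesSent * 8.0 is a bit count and delayUs is in microseconds, so the quotient is already in Mbit/sec (bits per microsecond equals megabits per second). A tiny self-contained check of that arithmetic, with made-up numbers:

#include <cstdio>

int main() {
    // Example values only: 1,250,000 bytes sent over one second (1e6 us).
    unsigned long long totalBytesSent = 1250000ULL;
    long long delayUs = 1000000LL;
    // bits / microseconds == megabits / second, matching the ALOGV above.
    double mbps = totalBytesSent * 8.0 / delayUs;
    printf("approx. net bandwidth used: %.2f Mbit/sec\n", mbps);  // 10.00
    return 0;
}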
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index 528a039..88f6ea9 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.h
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -44,6 +44,8 @@
const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
bool useInterleavedTCP);
+ status_t destroy();
+
int32_t getRTPPort() const;
int64_t getLastLifesignUs() const;
@@ -125,6 +127,8 @@
List<sp<ABuffer> > mHistory;
size_t mHistoryLength;
+ uint64_t mTotalBytesSent;
+
#if LOG_TRANSPORT_STREAM
FILE *mLogFile;
#endif
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 8e8f04a..aeefcf3 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -200,11 +200,14 @@
CHECK(msg->senderAwaitsResponse(&replyID));
for (size_t i = mPlaybackSessions.size(); i-- > 0;) {
- const sp<PlaybackSession> &playbackSession =
+ sp<PlaybackSession> playbackSession =
mPlaybackSessions.valueAt(i);
- looper()->unregisterHandler(playbackSession->id());
mPlaybackSessions.removeItemsAt(i);
+
+ playbackSession->destroy();
+ looper()->unregisterHandler(playbackSession->id());
+ playbackSession.clear();
}
if (mClient != NULL) {
@@ -454,9 +457,7 @@
const ClientInfo &info = mClientInfos.valueFor(sessionID);
request.append(StringPrintf("Session: %d\r\n", info.mPlaybackSessionID));
-
- request.append("Content-Length: 0\r\n");
- request.append("\r\n");
+ request.append("\r\n"); // Empty body
status_t err =
mNetSession->sendRequest(sessionID, request.c_str(), request.size());
@@ -761,7 +762,7 @@
return;
}
#if 1
- // The LG dongle doesn't specify client_port=xxx apparently.
+ // The older LG dongles apparently don't specify client_port=xxx.
} else if (transport == "RTP/AVP/UDP;unicast") {
clientRtp = 19000;
clientRtcp = clientRtp + 1;
@@ -966,19 +967,21 @@
int32_t cseq,
const sp<ParsedMessage> &data) {
int32_t playbackSessionID;
-#if 0
- // XXX the dongle does not include a "Session:" header in this request.
sp<PlaybackSession> playbackSession =
findPlaybackSession(data, &playbackSessionID);
+#if 1
+ // XXX the older dongles do not include a "Session:" header in this request.
+ if (playbackSession == NULL) {
+ CHECK_EQ(mPlaybackSessions.size(), 1u);
+ playbackSessionID = mPlaybackSessions.keyAt(0);
+ playbackSession = mPlaybackSessions.valueAt(0);
+ }
+#else
if (playbackSession == NULL) {
sendErrorResponse(sessionID, "454 Session Not Found", cseq);
return;
}
-#else
- CHECK_EQ(mPlaybackSessions.size(), 1u);
- playbackSessionID = mPlaybackSessions.keyAt(0);
- sp<PlaybackSession> playbackSession = mPlaybackSessions.valueAt(0);
#endif
playbackSession->updateLiveness();
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 27e2ed2..a44fb3e 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -5417,12 +5417,12 @@
/*static*/ void AudioFlinger::RecordThread::RecordTrack::appendDumpHeader(String8& result)
{
- result.append(" Clien Fmt Chn mask Session Buf S SRate Serv User\n");
+ result.append(" Clien Fmt Chn mask Session Buf S SRate Serv User FrameCount\n");
}
void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size)
{
- snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %05u %08x %08x\n",
+ snprintf(buffer, size, " %05d %03u 0x%08x %05d %04u %01d %05u %08x %08x %05d\n",
(mClient == 0) ? getpid_cached : mClient->pid(),
mFormat,
mChannelMask,
@@ -5431,7 +5431,8 @@
mState,
mCblk->sampleRate,
mCblk->server,
- mCblk->user);
+ mCblk->user,
+ mCblk->frameCount);
}
@@ -5983,6 +5984,9 @@
inputStandBy();
acquireWakeLock();
+ // used to verify we've read at least once before evaluating how many bytes were read
+ bool readOnce = false;
+
// start recording
while (!exitPending()) {
@@ -6013,10 +6017,10 @@
if (mReqChannelCount != mActiveTrack->channelCount()) {
mActiveTrack.clear();
mStartStopCond.broadcast();
- } else if (mBytesRead != 0) {
+ } else if (readOnce) {
// record start succeeds only if first read from audio input
// succeeds
- if (mBytesRead > 0) {
+ if (mBytesRead >= 0) {
mActiveTrack->mState = TrackBase::ACTIVE;
} else {
mActiveTrack.clear();
@@ -6045,6 +6049,7 @@
buffer.frameCount = mFrameCount;
if (CC_LIKELY(mActiveTrack->getNextBuffer(&buffer) == NO_ERROR)) {
+ readOnce = true;
size_t framesOut = buffer.frameCount;
if (mResampler == NULL) {
// no resampling
@@ -6079,9 +6084,10 @@
mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes);
mRsmpInIndex = 0;
}
- if (mBytesRead < 0) {
- ALOGE("Error reading audio input");
- if (mActiveTrack->mState == TrackBase::ACTIVE) {
+ if (mBytesRead <= 0) {
+ if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE))
+ {
+ ALOGE("Error reading audio input");
// Force input into standby so that it tries to
// recover at next read attempt
inputStandBy();
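The RecordThread changes above gate the start-status decision on whether a read has actually been attempted (readOnce) and treat a zero-byte read as non-fatal; only a negative mBytesRead tears the track down. A compressed sketch of that control flow, with a free function and a simplified TrackState enum standing in for the thread-loop state (names here are illustrative, not AudioFlinger API):

#include <cstdio>

// Simplified stand-in for the record-track state touched above.
enum class TrackState { IDLE, ACTIVE, STOPPED };

// Decide the track state once we know whether any read was attempted and
// how the last read went; mirrors the readOnce/mBytesRead checks above.
TrackState evaluateRecordStart(bool readOnce, long bytesRead) {
    if (!readOnce) {
        // No read attempted yet: keep waiting, don't judge mBytesRead.
        return TrackState::IDLE;
    }
    // A read happened: zero bytes is tolerated, only a negative result fails.
    return (bytesRead >= 0) ? TrackState::ACTIVE : TrackState::STOPPED;
}

int main() {
    printf("%d\n", static_cast<int>(evaluateRecordStart(false, -1)));  // 0: IDLE
    printf("%d\n", static_cast<int>(evaluateRecordStart(true, 0)));    // 1: ACTIVE
    printf("%d\n", static_cast<int>(evaluateRecordStart(true, -5)));   // 2: STOPPED
    return 0;
}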
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
index 2f8b7db..678f114 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
@@ -253,6 +253,7 @@
CaptureSequencer::CaptureState CaptureSequencer::manageZslStart(
sp<Camera2Client> &client) {
+ ALOGV("%s", __FUNCTION__);
status_t res;
sp<ZslProcessor> processor = mZslProcessor.promote();
if (processor == 0) {
@@ -271,7 +272,12 @@
return DONE;
}
// TODO: Actually select the right thing here.
- processor->pushToReprocess(mCaptureId);
+ res = processor->pushToReprocess(mCaptureId);
+ if (res != OK) {
+ ALOGW("%s: Camera %d: Failed to use ZSL queue, falling back to standard capture",
+ __FUNCTION__, client->getCameraId());
+ return STANDARD_START;
+ }
mTimeoutCount = kMaxTimeoutsForCaptureEnd;
return STANDARD_CAPTURE_WAIT;
@@ -279,11 +285,13 @@
CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting(
sp<Camera2Client> &client) {
+ ALOGV("%s", __FUNCTION__);
return DONE;
}
CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing(
sp<Camera2Client> &client) {
+ ALOGV("%s", __FUNCTION__);
return START;
}
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
index 0771872..58e820c 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
@@ -235,9 +235,19 @@
if (client == 0) return false;
if (mZslQueueTail != mZslQueueHead) {
+ CameraMetadata request;
+ size_t index = mZslQueueTail;
+ while (request.isEmpty() && index != mZslQueueHead) {
+ request = mZslQueue[index].frame;
+ index = (index + 1) % kZslBufferDepth;
+ }
+ if (request.isEmpty()) {
+ ALOGE("No request in ZSL queue to send!");
+ return BAD_VALUE;
+ }
buffer_handle_t *handle =
- &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
- CameraMetadata request = mZslQueue[mZslQueueTail].frame;
+ &(mZslQueue[index].buffer.mGraphicBuffer->handle);
+
uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
res = request.update(ANDROID_REQUEST_TYPE,
&requestType, 1);
@@ -306,19 +316,25 @@
status_t ZslProcessor::processNewZslBuffer(sp<Camera2Client> &client) {
ATRACE_CALL();
status_t res;
+
+ ALOGVV("Trying to get next buffer");
+ BufferItemConsumer::BufferItem item;
+ res = mZslConsumer->acquireBuffer(&item);
+ if (res != OK) {
+ if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+ ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+ "%s (%d)", __FUNCTION__,
+ client->getCameraId(), strerror(-res), res);
+ } else {
+ ALOGVV(" No buffer");
+ }
+ return res;
+ }
+
Mutex::Autolock l(mInputMutex);
if (mState == LOCKED) {
- BufferItemConsumer::BufferItem item;
- res = mZslConsumer->acquireBuffer(&item);
- if (res != OK) {
- if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
- ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
- "%s (%d)", __FUNCTION__,
- client->getCameraId(), strerror(-res), res);
- }
- return res;
- }
+ ALOGVV("In capture, discarding new ZSL buffers");
mZslConsumer->releaseBuffer(item);
return OK;
}
@@ -326,6 +342,7 @@
ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail);
if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
+ ALOGVV("Releasing oldest buffer");
mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
mZslQueue.replaceAt(mZslQueueTail);
mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
@@ -333,20 +350,12 @@
ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead);
- res = mZslConsumer->acquireBuffer(&(queueHead.buffer));
- if (res != OK) {
- if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
- ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
- "%s (%d)", __FUNCTION__,
- client->getCameraId(), strerror(-res), res);
- }
- return res;
- }
+ queueHead.buffer = item;
queueHead.frame.release();
mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth;
- ALOGVV(" Added buffer, timestamp %lld", queueHead.buffer.mTimestamp);
+ ALOGVV(" Acquired buffer, timestamp %lld", queueHead.buffer.mTimestamp);
findMatchesLocked();
@@ -354,9 +363,20 @@
}
void ZslProcessor::findMatchesLocked() {
+ ALOGVV("Scanning");
for (size_t i = 0; i < mZslQueue.size(); i++) {
ZslPair &queueEntry = mZslQueue.editItemAt(i);
nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+ IF_ALOGV() {
+ camera_metadata_entry_t entry;
+ nsecs_t frameTimestamp = 0;
+ if (!queueEntry.frame.isEmpty()) {
+ entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
+ frameTimestamp = entry.data.i64[0];
+ }
+ ALOGVV(" %d: b: %lld\tf: %lld", i,
+ bufferTimestamp, frameTimestamp );
+ }
if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) {
// Have buffer, no matching frame. Look for one
for (size_t j = 0; j < mFrameList.size(); j++) {
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
index 74921a3..b60f61b 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.h
+++ b/services/camera/libcameraservice/camera2/ZslProcessor.h
@@ -93,7 +93,7 @@
CameraMetadata frame;
};
- static const size_t kZslBufferDepth = 3;
+ static const size_t kZslBufferDepth = 4;
static const size_t kFrameListDepth = kZslBufferDepth * 2;
Vector<CameraMetadata> mFrameList;
size_t mFrameListHead;
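Finally, the ZslProcessor::pushToReprocess() change earlier in this patch no longer assumes the queue tail holds usable metadata; it scans forward from the tail toward the head for the first entry with a non-empty frame. A minimal sketch of that circular-queue scan over the (now 4-deep) ring buffer, with a plain struct standing in for ZslPair/CameraMetadata (names are illustrative, not the camera2 API):

// Sketch of the tail-to-head scan over the fixed-depth ZSL ring buffer.
// ZslEntry and hasFrame() are simplified stand-ins for ZslPair/CameraMetadata.
#include <array>
#include <cstdio>

struct ZslEntry {
    long long frameTimestamp;  // 0 means "no metadata captured yet"
    bool hasFrame() const { return frameTimestamp != 0; }
};

static const size_t kZslBufferDepth = 4;  // matches the new depth above

// Returns the index of the first entry between tail (inclusive) and head
// (exclusive) that carries metadata, or -1 if none does.
int findReprocessCandidate(const std::array<ZslEntry, kZslBufferDepth> &queue,
                           size_t tail, size_t head) {
    size_t index = tail;
    while (index != head) {
        if (queue[index].hasFrame()) return static_cast<int>(index);
        index = (index + 1) % kZslBufferDepth;
    }
    return -1;  // analogous to "No request in ZSL queue to send!"
}

int main() {
    std::array<ZslEntry, kZslBufferDepth> queue = {{{0}, {1001}, {1002}, {0}}};
    int idx = findReprocessCandidate(queue, /*tail=*/0, /*head=*/3);
    printf("candidate slot: %d\n", idx);  // prints 1
    return 0;
}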