Merge "configure bitrate and bitrate mode at encoder initialization for VP8/9" into klp-dev
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index f447c5b..7765914 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -133,11 +133,19 @@
}
status_t CameraMetadata::append(const CameraMetadata &other) {
+ return append(other.mBuffer);
+}
+
+status_t CameraMetadata::append(const camera_metadata_t* other) {
if (mLocked) {
ALOGE("%s: CameraMetadata is locked", __FUNCTION__);
return INVALID_OPERATION;
}
- return append_camera_metadata(mBuffer, other.mBuffer);
+ size_t extraEntries = get_camera_metadata_entry_count(other);
+ size_t extraData = get_camera_metadata_data_count(other);
+ resizeIfNeeded(extraEntries, extraData);
+
+ return append_camera_metadata(mBuffer, other);
}
size_t CameraMetadata::entryCount() const {
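Note on the new overload: the partial-result handling in Camera3Device (later in this change) accumulates metadata by appending raw camera_metadata_t buffers straight from the HAL, and resizeIfNeeded() grows the destination first so append_camera_metadata() cannot fail for lack of capacity. A minimal sketch of the call shape; mergeHalResult is a hypothetical helper, not part of the patch:

```cpp
#include <camera/CameraMetadata.h>
#include <utils/Errors.h>

using namespace android;

// Merge a raw HAL buffer into an accumulator without first wrapping it in
// a CameraMetadata (which would copy it). The overload above grows the
// backing buffer up front before delegating to append_camera_metadata().
static status_t mergeHalResult(CameraMetadata &accumulated,
                               const camera_metadata_t *halResult) {
    return accumulated.append(halResult);
}
```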
diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h
index fe2bd19..1254d3c 100644
--- a/include/camera/CameraMetadata.h
+++ b/include/camera/CameraMetadata.h
@@ -99,6 +99,11 @@
status_t append(const CameraMetadata &other);
/**
+ * Append metadata from a raw camera_metadata buffer
+ */
+ status_t append(const camera_metadata* other);
+
+ /**
* Number of metadata entries.
*/
size_t entryCount() const;
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
index 6b7a63c..34213be 100644
--- a/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -41,7 +41,8 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenTimeLapseFrameCaptureUs);
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ bool storeMetaDataInVideoBuffers = true);
virtual ~CameraSourceTimeLapse();
@@ -116,7 +117,8 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenTimeLapseFrameCaptureUs);
+ int64_t timeBetweenTimeLapseFrameCaptureUs,
+ bool storeMetaDataInVideoBuffers = true);
// Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
// It only handles the case when mLastReadBufferCopy is signalled. Otherwise
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 095d5ca..f9d9020 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -70,8 +70,9 @@
mOutputFd(-1),
mAudioSource(AUDIO_SOURCE_CNT),
mVideoSource(VIDEO_SOURCE_LIST_END),
- mStarted(false), mSurfaceMediaSource(NULL),
- mCaptureTimeLapse(false) {
+ mCaptureTimeLapse(false),
+ mStarted(false),
+ mSurfaceMediaSource(NULL) {
ALOGV("Constructor");
reset();
@@ -1089,7 +1090,22 @@
}
}
-status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+status_t StagefrightRecorder::checkVideoEncoderCapabilities(
+ bool *supportsCameraSourceMetaDataMode) {
+ /* hardware codecs must support camera source meta data mode */
+ Vector<CodecCapabilities> codecs;
+ OMXClient client;
+ CHECK_EQ(client.connect(), (status_t)OK);
+ QueryCodecs(
+ client.interface(),
+ (mVideoEncoder == VIDEO_ENCODER_H263 ? MEDIA_MIMETYPE_VIDEO_H263 :
+ mVideoEncoder == VIDEO_ENCODER_MPEG_4_SP ? MEDIA_MIMETYPE_VIDEO_MPEG4 :
+ mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),
+ false /* decoder */, true /* hwCodec */, &codecs);
+ *supportsCameraSourceMetaDataMode = codecs.size() > 0;
+ ALOGV("encoder %s camera source meta-data mode",
+ *supportsCameraSourceMetaDataMode ? "supports" : "DOES NOT SUPPORT");
+
if (!mCaptureTimeLapse) {
// Don't clip for time-lapse capture, as the encoder will have enough
// time to encode because of the slow time-lapse capture rate.
@@ -1307,7 +1323,9 @@
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
status_t err = OK;
- if ((err = checkVideoEncoderCapabilities()) != OK) {
+ bool encoderSupportsCameraSourceMetaDataMode;
+ if ((err = checkVideoEncoderCapabilities(
+ &encoderSupportsCameraSourceMetaDataMode)) != OK) {
return err;
}
Size videoSize;
@@ -1323,13 +1341,14 @@
mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
videoSize, mFrameRate, mPreviewSurface,
- mTimeBetweenTimeLapseFrameCaptureUs);
+ mTimeBetweenTimeLapseFrameCaptureUs,
+ encoderSupportsCameraSourceMetaDataMode);
*cameraSource = mCameraSourceTimeLapse;
} else {
*cameraSource = CameraSource::CreateFromCamera(
mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
videoSize, mFrameRate,
- mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
+ mPreviewSurface, encoderSupportsCameraSourceMetaDataMode);
}
mCamera.clear();
mCameraProxy.clear();
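The QueryCodecs() call above decides whether the camera source may hand the encoder metadata (gralloc handles) instead of copied YUV frames: only hardware encoders support that mode, so an empty result set forces the plain-buffer path. The nested ternaries restated as a switch (hypothetical helper, assuming the video_encoder enum from media/mediarecorder.h; an empty MIME matches no hardware codec, so metadata mode comes back unsupported):

```cpp
#include <media/mediarecorder.h>          // video_encoder
#include <media/stagefright/MediaDefs.h>  // MEDIA_MIMETYPE_VIDEO_*

using namespace android;

static const char *mimeForEncoder(video_encoder encoder) {
    switch (encoder) {
        case VIDEO_ENCODER_H263:      return MEDIA_MIMETYPE_VIDEO_H263;
        case VIDEO_ENCODER_MPEG_4_SP: return MEDIA_MIMETYPE_VIDEO_MPEG4;
        case VIDEO_ENCODER_H264:      return MEDIA_MIMETYPE_VIDEO_AVC;
        default:                      return "";  // no hardware query
    }
}
```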
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index c864207..31f09e0 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -139,7 +139,8 @@
status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
- status_t checkVideoEncoderCapabilities();
+ status_t checkVideoEncoderCapabilities(
+ bool *supportsCameraSourceMetaDataMode);
status_t checkAudioEncoderCapabilities();
// Generic MediaSource set-up. Returns the appropriate
// source (CameraSource or SurfaceMediaSource)
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index d8b35d7..f1782cc 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -201,7 +201,16 @@
switch (what) {
case LiveSession::kWhatPrepared:
{
- notifyVideoSizeChanged(0, 0);
+ // Notify with the current size if we already have it; otherwise report an initial size of (0,0)
+ sp<AMessage> format = getFormat(false /* audio */);
+ int32_t width;
+ int32_t height;
+ if (format != NULL &&
+ format->findInt32("width", &width) && format->findInt32("height", &height)) {
+ notifyVideoSizeChanged(width, height);
+ } else {
+ notifyVideoSizeChanged(0, 0);
+ }
uint32_t flags = FLAG_CAN_PAUSE;
if (mLiveSession->isSeekable()) {
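The fix matters because kWhatPrepared can fire after LiveSession has already parsed the stream dimensions; always reporting (0,0) made clients wait for a later size-changed event. The same lookup as a small helper (hypothetical refactor of the hunk above):

```cpp
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// True only when the video format message carries both dimensions.
static bool getVideoDimensions(const sp<AMessage> &format,
                               int32_t *width, int32_t *height) {
    return format != NULL
            && format->findInt32("width", width)
            && format->findInt32("height", height);
}
```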
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f5c7a97..528fdb9 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3088,11 +3088,16 @@
/* these are unfilled buffers returned by client */
CHECK(msg->findInt32("err", &err));
- ALOGV("[%s] saw error %d instead of an input buffer",
- mCodec->mComponentName.c_str(), err);
+ if (err == OK) {
+ /* buffers with no errors are returned on MediaCodec.flush */
+ mode = KEEP_BUFFERS;
+ } else {
+ ALOGV("[%s] saw error %d instead of an input buffer",
+ mCodec->mComponentName.c_str(), err);
+ eos = true;
+ }
buffer.clear();
- mode = KEEP_BUFFERS;
}
int32_t tmp;
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 20214e8..5772316 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -41,13 +41,15 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenFrameCaptureUs) {
+ int64_t timeBetweenFrameCaptureUs,
+ bool storeMetaDataInVideoBuffers) {
CameraSourceTimeLapse *source = new
CameraSourceTimeLapse(camera, proxy, cameraId,
clientName, clientUid,
videoSize, videoFrameRate, surface,
- timeBetweenFrameCaptureUs);
+ timeBetweenFrameCaptureUs,
+ storeMetaDataInVideoBuffers);
if (source != NULL) {
if (source->initCheck() != OK) {
@@ -67,9 +69,11 @@
Size videoSize,
int32_t videoFrameRate,
const sp<IGraphicBufferProducer>& surface,
- int64_t timeBetweenFrameCaptureUs)
+ int64_t timeBetweenFrameCaptureUs,
+ bool storeMetaDataInVideoBuffers)
: CameraSource(camera, proxy, cameraId, clientName, clientUid,
- videoSize, videoFrameRate, surface, true),
+ videoSize, videoFrameRate, surface,
+ storeMetaDataInVideoBuffers),
mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
mLastTimeLapseFrameRealTimestampUs(0),
mSkipCurrentFrame(false) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e0686be..c36dd7c 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1500,7 +1500,8 @@
info->mOwnedByClient = false;
if (portIndex == kPortIndexInput) {
- msg->setInt32("err", ERROR_END_OF_STREAM);
+ /* no error, just returning buffers */
+ msg->setInt32("err", OK);
}
msg->post();
}
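This hunk and the ACodec.cpp hunk above are two halves of one protocol change: an input buffer returned on MediaCodec.flush() now carries err == OK (a clean return), while a nonzero err still means the stream is over. A standalone restatement of the receiver's decision (a sketch with assumed names; KEEP_BUFFERS is borrowed from ACodec's port-mode vocabulary):

```cpp
#include <utils/Errors.h>

using namespace android;

enum BufferMode { RESUBMIT_BUFFERS, KEEP_BUFFERS };

// err is the value MediaCodec attaches when handing the buffer back.
// Returns true when the return should be treated as end of stream.
static bool onInputBufferReturned(status_t err, BufferMode *mode) {
    if (err == OK) {
        // MediaCodec.flush(): the buffer came back clean; park it for
        // resubmission once the flush completes.
        *mode = KEEP_BUFFERS;
        return false;
    }
    // A real error (the only case before this change) still ends input;
    // *mode is left untouched, matching the ACodec hunk.
    return true;
}
```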
diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp
index 6a16bb4..1a9a26b 100644
--- a/media/libstagefright/TimedEventQueue.cpp
+++ b/media/libstagefright/TimedEventQueue.cpp
@@ -38,11 +38,14 @@
namespace android {
+static int64_t kWakelockMinDelay = 100000ll; // 100ms
+
TimedEventQueue::TimedEventQueue()
: mNextEventID(1),
mRunning(false),
mStopped(false),
- mDeathRecipient(new PMDeathRecipient(this)) {
+ mDeathRecipient(new PMDeathRecipient(this)),
+ mWakeLockCount(0) {
}
TimedEventQueue::~TimedEventQueue() {
@@ -87,9 +90,7 @@
// some events may be left in the queue if we did not flush and the wake lock
// must be released.
- if (!mQueue.empty()) {
- releaseWakeLock_l();
- }
+ releaseWakeLock_l(true /*force*/);
mQueue.clear();
mRunning = false;
@@ -126,13 +127,15 @@
QueueItem item;
item.event = event;
item.realtime_us = realtime_us;
+ item.has_wakelock = false;
if (it == mQueue.begin()) {
mQueueHeadChangedCondition.signal();
}
- if (mQueue.empty()) {
+ if (realtime_us > ALooper::GetNowUs() + kWakelockMinDelay) {
acquireWakeLock_l();
+ item.has_wakelock = true;
}
mQueue.insert(it, item);
@@ -188,10 +191,10 @@
ALOGV("cancelling event %d", (*it).event->eventID());
(*it).event->setEventID(0);
- it = mQueue.erase(it);
- if (mQueue.empty()) {
+ if ((*it).has_wakelock) {
releaseWakeLock_l();
}
+ it = mQueue.erase(it);
if (stopAfterFirstMatch) {
return;
}
@@ -297,11 +300,10 @@
if ((*it).event->eventID() == id) {
sp<Event> event = (*it).event;
event->setEventID(0);
-
- mQueue.erase(it);
- if (mQueue.empty()) {
+ if ((*it).has_wakelock) {
releaseWakeLock_l();
}
+ mQueue.erase(it);
return event;
}
}
@@ -313,51 +315,59 @@
void TimedEventQueue::acquireWakeLock_l()
{
- if (mWakeLockToken != 0) {
- return;
- }
- if (mPowerManager == 0) {
- // use checkService() to avoid blocking if power service is not up yet
- sp<IBinder> binder =
- defaultServiceManager()->checkService(String16("power"));
- if (binder == 0) {
- ALOGW("cannot connect to the power manager service");
- } else {
- mPowerManager = interface_cast<IPowerManager>(binder);
- binder->linkToDeath(mDeathRecipient);
+ if (mWakeLockCount++ == 0) {
+ CHECK(mWakeLockToken == 0);
+ if (mPowerManager == 0) {
+ // use checkService() to avoid blocking if power service is not up yet
+ sp<IBinder> binder =
+ defaultServiceManager()->checkService(String16("power"));
+ if (binder == 0) {
+ ALOGW("cannot connect to the power manager service");
+ } else {
+ mPowerManager = interface_cast<IPowerManager>(binder);
+ binder->linkToDeath(mDeathRecipient);
+ }
}
- }
- if (mPowerManager != 0) {
- sp<IBinder> binder = new BBinder();
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
- binder,
- String16("TimedEventQueue"),
- String16("media"));
- IPCThreadState::self()->restoreCallingIdentity(token);
- if (status == NO_ERROR) {
- mWakeLockToken = binder;
+ if (mPowerManager != 0) {
+ sp<IBinder> binder = new BBinder();
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
+ binder,
+ String16("TimedEventQueue"),
+ String16("media"));
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ if (status == NO_ERROR) {
+ mWakeLockToken = binder;
+ }
}
}
}
-void TimedEventQueue::releaseWakeLock_l()
+void TimedEventQueue::releaseWakeLock_l(bool force)
{
- if (mWakeLockToken == 0) {
- return;
+ if (force) {
+ if (mWakeLockCount == 0) {
+ return;
+ }
+ // Force wakelock release below by setting reference count to 1.
+ mWakeLockCount = 1;
}
- if (mPowerManager != 0) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mPowerManager->releaseWakeLock(mWakeLockToken, 0);
- IPCThreadState::self()->restoreCallingIdentity(token);
+ CHECK(mWakeLockCount != 0);
+ if (--mWakeLockCount == 0) {
+ CHECK(mWakeLockToken != 0);
+ if (mPowerManager != 0) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mPowerManager->releaseWakeLock(mWakeLockToken, 0);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+ mWakeLockToken.clear();
}
- mWakeLockToken.clear();
}
void TimedEventQueue::clearPowerManager()
{
Mutex::Autolock _l(mLock);
- releaseWakeLock_l();
+ releaseWakeLock_l(true /*force*/);
mPowerManager.clear();
}
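Summary of the TimedEventQueue change: instead of one wakelock held whenever the queue is non-empty, each queued item scheduled more than kWakelockMinDelay (100 ms) in the future takes its own reference, and the real lock is acquired only on the 0 -> 1 transition and released on 1 -> 0. The force flag serves the stop/death paths, which must drop the lock no matter how many items still hold references. The counting scheme in isolation (a sketch; the real code runs under mLock and performs the IPowerManager calls where the comments indicate):

```cpp
#include <cassert>
#include <cstdint>

class WakeLockRefCount {
public:
    void acquire() {
        if (mCount++ == 0) {
            // First reference: take the real partial wakelock here.
        }
    }
    void release(bool force = false) {
        if (force) {
            if (mCount == 0) return;  // nothing held, nothing to release
            mCount = 1;               // collapse outstanding references
        }
        assert(mCount != 0);          // CHECK() in the real code
        if (--mCount == 0) {
            // Last reference: release the real wakelock here.
        }
    }
private:
    uint32_t mCount = 0;
};
```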
diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h
index 4e49c83..38a08b1 100644
--- a/media/libstagefright/include/TimedEventQueue.h
+++ b/media/libstagefright/include/TimedEventQueue.h
@@ -118,6 +118,7 @@
struct QueueItem {
sp<Event> event;
int64_t realtime_us;
+ bool has_wakelock;
};
struct StopEvent : public TimedEventQueue::Event {
@@ -139,6 +140,7 @@
sp<IPowerManager> mPowerManager;
sp<IBinder> mWakeLockToken;
const sp<PMDeathRecipient> mDeathRecipient;
+ uint32_t mWakeLockCount;
static void *ThreadWrapper(void *me);
void threadEntry();
@@ -146,7 +148,7 @@
sp<Event> removeEventFromQueue_l(event_id id);
void acquireWakeLock_l();
- void releaseWakeLock_l();
+ void releaseWakeLock_l(bool force = false);
TimedEventQueue(const TimedEventQueue &);
TimedEventQueue &operator=(const TimedEventQueue &);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index f295e5a..a7c5317 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -148,6 +148,18 @@
}
}
+void GraphicBufferSource::omxIdle() {
+ ALOGV("omxIdle");
+
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting) {
+ // We are only interested in the transition from executing->idle,
+ // not loaded->idle.
+ mEndOfStream = mEndOfStreamSent = true;
+ }
+}
+
void GraphicBufferSource::omxLoaded(){
Mutex::Autolock autoLock(mMutex);
if (!mExecuting) {
@@ -213,7 +225,12 @@
// see if the GraphicBuffer reference was null, which should only ever
// happen for EOS.
if (codecBuffer.mGraphicBuffer == NULL) {
- CHECK(mEndOfStream && mEndOfStreamSent);
+ if (!(mEndOfStream && mEndOfStreamSent)) {
+ // This can happen when broken code sends us the same buffer
+ // twice in a row.
+ ALOGE("ERROR: codecBufferEmptied on non-EOS null buffer "
+ "(buffer emptied twice?)");
+ }
// No GraphicBuffer to deal with, no additional input or output is
// expected, so just return.
return;
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 244a843..9e5eee6 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -69,6 +69,11 @@
// sitting in the BufferQueue, this will send them to the codec.
void omxExecuting();
+ // This is called when OMX transitions to OMX_StateIdle, indicating that
+ // the codec is meant to return all buffers back to the client for them
+ // to be freed. Do NOT submit any more buffers to the component.
+ void omxIdle();
+
// This is called when OMX transitions to OMX_StateLoaded, indicating that
// we are shutting down.
void omxLoaded();
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 46e5d71..5f104fc 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -243,13 +243,18 @@
status_t OMXNodeInstance::sendCommand(
OMX_COMMANDTYPE cmd, OMX_S32 param) {
const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
- if (bufferSource != NULL
- && cmd == OMX_CommandStateSet
- && param == OMX_StateLoaded) {
- // Initiating transition from Executing -> Loaded
- // Buffers are about to be freed.
- bufferSource->omxLoaded();
- setGraphicBufferSource(NULL);
+ if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
+ if (param == OMX_StateIdle) {
+ // Initiating transition from Executing -> Idle
+ // ACodec is waiting for all buffers to be returned, do NOT
+ // submit any more buffers to the codec.
+ bufferSource->omxIdle();
+ } else if (param == OMX_StateLoaded) {
+ // Initiating transition from Idle/Executing -> Loaded
+ // Buffers are about to be freed.
+ bufferSource->omxLoaded();
+ setGraphicBufferSource(NULL);
+ }
// fall through
}
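The bug this fixes: GraphicBufferSource only learned about the final transition to Loaded, so during Executing -> Idle (when ACodec is waiting for every buffer to come back before freeing them) it could keep submitting new frames. The dispatch restated as a switch; a paraphrase of the hunk above using sendCommand's own locals, not a drop-in replacement:

```cpp
if (bufferSource != NULL && cmd == OMX_CommandStateSet) {
    switch (param) {
        case OMX_StateIdle:
            // Executing -> Idle: stop submitting, let buffers drain back.
            bufferSource->omxIdle();
            break;
        case OMX_StateLoaded:
            // Idle/Executing -> Loaded: buffers are about to be freed.
            bufferSource->omxLoaded();
            setGraphicBufferSource(NULL);
            break;
        default:
            break;
    }
}
```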
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 9d705f2..ba8195e 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3673,6 +3673,9 @@
if (--(track->mRetryCount) <= 0) {
ALOGV("BUFFER TIMEOUT: remove(%d) from active list", track->name());
tracksToRemove->add(track);
+ // indicate to client process that the track was disabled because of underrun;
+ // it will then automatically call start() when data is available
+ android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
} else if (last) {
mixerStatus = MIXER_TRACKS_ENABLED;
}
@@ -4094,6 +4097,9 @@
ALOGV("OffloadThread: BUFFER TIMEOUT: remove(%d) from active list",
track->name());
tracksToRemove->add(track);
+ // indicate to client process that the track was disabled because of underrun;
+ // it will then automatically call start() when data is available
+ android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
} else if (last){
mixerStatus = MIXER_TRACKS_ENABLED;
}
@@ -4239,6 +4245,7 @@
for (size_t i = 0; i < outputTracks.size(); i++) {
outputTracks[i]->write(mMixBuffer, writeFrames);
}
+ mStandby = false;
return (ssize_t)mixBufferSize;
}
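Without CBLK_DISABLED, a track the mixer drops for underrun simply goes silent until the app explicitly restarts it. Setting the flag in the shared control block lets the client half of AudioTrack notice the disable and call start() itself once more data is written. A fragment sketch of that client-side check (assumed shape; the actual logic lives in the AudioTrack client code, which this patch does not touch):

```cpp
#include <cutils/atomic.h>

// cblk is the shared audio_track_cblk_t; mFlags is the field set by the
// server above. android_atomic_and() returns the previous flag value.
int32_t prev = android_atomic_and(~CBLK_DISABLED, &cblk->mFlags);
if (prev & CBLK_DISABLED) {
    // The mixer disabled the track for underrun; restart it now that the
    // app has queued more data.
    track->start();
}
```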
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index df3b162..0b6ca5c 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -76,13 +76,15 @@
return res;
}
- SharedParameters::Lock l(mParameters);
+ {
+ SharedParameters::Lock l(mParameters);
- res = l.mParameters.initialize(&(mDevice->info()));
- if (res != OK) {
- ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
- __FUNCTION__, mCameraId, strerror(-res), res);
- return NO_INIT;
+ res = l.mParameters.initialize(&(mDevice->info()));
+ if (res != OK) {
+ ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
+ __FUNCTION__, mCameraId, strerror(-res), res);
+ return NO_INIT;
+ }
}
String8 threadName;
@@ -135,6 +137,7 @@
mCallbackProcessor->run(threadName.string());
if (gLogLevel >= 1) {
+ SharedParameters::Lock l(mParameters);
ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
mCameraId);
ALOGD("%s", l.mParameters.paramsFlattened.string());
@@ -353,6 +356,10 @@
result.appendFormat(" meteringCropRegion\n");
haveQuirk = true;
}
+ if (p.quirks.partialResults) {
+ result.appendFormat(" usePartialResult\n");
+ haveQuirk = true;
+ }
if (!haveQuirk) {
result.appendFormat(" none\n");
}
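The Camera2Client restructuring is purely about lock lifetime: SharedParameters::Lock is RAII, so the old code kept the parameters lock for the rest of initialize(), including while starting processor threads that also take it. The added braces bound the hold to the one call that needs it, and the gLogLevel dump path now takes its own short-lived lock. The shape of the fix:

```cpp
{
    SharedParameters::Lock l(mParameters);  // acquired here
    res = l.mParameters.initialize(&(mDevice->info()));
    // ... error handling ...
}   // released here, before any thread that also locks mParameters starts
```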
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index c34cb12..19acae4 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -29,13 +29,27 @@
namespace camera2 {
FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
- wp<Camera2Client> client) :
+ sp<Camera2Client> client) :
FrameProcessorBase(device),
mClient(client),
- mLastFrameNumberOfFaces(0) {
+ mLastFrameNumberOfFaces(0),
+ mLast3AFrameNumber(-1) {
sp<CameraDeviceBase> d = device.promote();
mSynthesize3ANotify = !(d->willNotify3A());
+
+ {
+ SharedParameters::Lock l(client->getParameters());
+ mUsePartialQuirk = l.mParameters.quirks.partialResults;
+
+ // Initialize starting 3A state
+ m3aState.afTriggerId = l.mParameters.afTriggerCounter;
+ m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
+ // Check if lens is fixed-focus
+ if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
+ m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+ }
+ }
}
FrameProcessor::~FrameProcessor() {
@@ -49,20 +63,25 @@
return false;
}
- if (processFaceDetect(frame, client) != OK) {
+ bool partialResult = false;
+ if (mUsePartialQuirk) {
+ camera_metadata_entry_t entry;
+ entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ if (entry.count > 0 &&
+ entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ partialResult = true;
+ }
+ }
+
+ if (!partialResult && processFaceDetect(frame, client) != OK) {
return false;
}
if (mSynthesize3ANotify) {
- // Ignoring missing fields for now
process3aState(frame, client);
}
- if (!FrameProcessorBase::processSingleFrame(frame, device)) {
- return false;
- }
-
- return true;
+ return FrameProcessorBase::processSingleFrame(frame, device);
}
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
@@ -198,86 +217,75 @@
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
- int mId = client->getCameraId();
+ int cameraId = client->getCameraId();
entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
int32_t frameNumber = entry.data.i32[0];
+ // Don't send 3A notifications for the same frame number twice
+ if (frameNumber <= mLast3AFrameNumber) {
+ ALOGV("%s: Already sent 3A for frame number %d, skipping",
+ __FUNCTION__, frameNumber);
+ return OK;
+ }
+
+ mLast3AFrameNumber = frameNumber;
+
// Get 3A states from result metadata
bool gotAllStates = true;
AlgState new3aState;
- entry = frame.find(ANDROID_CONTROL_AE_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.aeState =
- static_cast<camera_metadata_enum_android_control_ae_state>(
- entry.data.u8[0]);
- }
+ // TODO: Also use AE mode, AE trigger ID
- entry = frame.find(ANDROID_CONTROL_AF_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.afState =
- static_cast<camera_metadata_enum_android_control_af_state>(
- entry.data.u8[0]);
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
+ &new3aState.afMode, frameNumber, cameraId);
- entry = frame.find(ANDROID_CONTROL_AWB_STATE);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- new3aState.awbState =
- static_cast<camera_metadata_enum_android_control_awb_state>(
- entry.data.u8[0]);
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_MODE,
+ &new3aState.awbMode, frameNumber, cameraId);
- int32_t afTriggerId = 0;
- entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- afTriggerId = entry.data.i32[0];
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AE_STATE,
+ &new3aState.aeState, frameNumber, cameraId);
- int32_t aeTriggerId = 0;
- entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
- if (entry.count == 0) {
- ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL"
- " for frame %d!",
- __FUNCTION__, mId, frameNumber);
- gotAllStates = false;
- } else {
- aeTriggerId = entry.data.i32[0];
- }
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_STATE,
+ &new3aState.afState, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_STATE,
+ &new3aState.awbState, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &new3aState.afTriggerId, frameNumber, cameraId);
+
+ gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &new3aState.aeTriggerId, frameNumber, cameraId);
if (!gotAllStates) return BAD_VALUE;
if (new3aState.aeState != m3aState.aeState) {
- ALOGV("%s: AE state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.aeState, new3aState.aeState);
- client->notifyAutoExposure(new3aState.aeState, aeTriggerId);
+ ALOGV("%s: Camera %d: AE state %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.aeState, new3aState.aeState);
+ client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
}
- if (new3aState.afState != m3aState.afState) {
- ALOGV("%s: AF state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.afState, new3aState.afState);
- client->notifyAutoFocus(new3aState.afState, afTriggerId);
+
+ if (new3aState.afState != m3aState.afState ||
+ new3aState.afMode != m3aState.afMode ||
+ new3aState.afTriggerId != m3aState.afTriggerId) {
+ ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.afState, new3aState.afState,
+ m3aState.afMode, new3aState.afMode,
+ m3aState.afTriggerId, new3aState.afTriggerId);
+ client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
}
- if (new3aState.awbState != m3aState.awbState) {
- ALOGV("%s: AWB state changed from 0x%x to 0x%x",
- __FUNCTION__, m3aState.awbState, new3aState.awbState);
- client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId);
+ if (new3aState.awbState != m3aState.awbState ||
+ new3aState.awbMode != m3aState.awbMode) {
+ ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
+ __FUNCTION__, cameraId,
+ m3aState.awbState, new3aState.awbState,
+ m3aState.awbMode, new3aState.awbMode);
+ client->notifyAutoWhitebalance(new3aState.awbState,
+ new3aState.aeTriggerId);
}
m3aState = new3aState;
@@ -285,6 +293,39 @@
return OK;
}
+template<typename Src, typename T>
+bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber, int cameraId) {
+ camera_metadata_ro_entry_t entry;
+ if (value == NULL) {
+ ALOGE("%s: Camera %d: Value to write to is NULL",
+ __FUNCTION__, cameraId);
+ return false;
+ }
+
+ entry = result.find(tag);
+ if (entry.count == 0) {
+ ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+ __FUNCTION__, cameraId,
+ get_camera_metadata_tag_name(tag), frameNumber);
+ return false;
+ } else {
+ switch (sizeof(Src)) {
+ case sizeof(uint8_t):
+ *value = static_cast<T>(entry.data.u8[0]);
+ break;
+ case sizeof(int32_t):
+ *value = static_cast<T>(entry.data.i32[0]);
+ break;
+ default:
+ ALOGE("%s: Camera %d: Unsupported source",
+ __FUNCTION__, cameraId);
+ return false;
+ }
+ }
+ return true;
+}
+
void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
const camera_frame_metadata &metadata) {
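Reading the new template: Src names the on-the-wire width of the metadata payload (u8 or i32) and drives the switch, while T is deduced from the out-pointer, so callers can store straight into enum-typed fields. Call shapes from the hunks above; note that passing the wrong Src for a tag silently reads the wrong union member, so Src must match the tag's declared type:

```cpp
// T deduced as camera_metadata_enum_android_control_af_mode;
// Src = uint8_t selects entry.data.u8[0].
gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
        &new3aState.afMode, frameNumber, cameraId);

// Trigger IDs are stored as i32, so Src = int32_t selects entry.data.i32[0].
gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
        &new3aState.afTriggerId, frameNumber, cameraId);
```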
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 2a17d45..856ad32 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -39,7 +39,7 @@
*/
class FrameProcessor : public FrameProcessorBase {
public:
- FrameProcessor(wp<CameraDeviceBase> device, wp<Camera2Client> client);
+ FrameProcessor(wp<CameraDeviceBase> device, sp<Camera2Client> client);
~FrameProcessor();
private:
@@ -61,18 +61,44 @@
status_t process3aState(const CameraMetadata &frame,
const sp<Camera2Client> &client);
+ // Helper for process3aState
+ template<typename Src, typename T>
+ bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+ int32_t frameNumber, int cameraId);
+
+
struct AlgState {
+ // TODO: also track AE mode
+ camera_metadata_enum_android_control_af_mode afMode;
+ camera_metadata_enum_android_control_awb_mode awbMode;
+
camera_metadata_enum_android_control_ae_state aeState;
camera_metadata_enum_android_control_af_state afState;
camera_metadata_enum_android_control_awb_state awbState;
+ int32_t afTriggerId;
+ int32_t aeTriggerId;
+
+ // These defaults need to match those in Parameters.cpp
AlgState() :
+ afMode(ANDROID_CONTROL_AF_MODE_AUTO),
+ awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
- awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE) {
+ awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
+ afTriggerId(0),
+ aeTriggerId(0) {
}
} m3aState;
+ // Whether the partial result quirk is enabled for this device
+ bool mUsePartialQuirk;
+
+ // Track the most recent frame number for which 3A notifications were sent.
+ // Used to avoid sending 3A notifications for the same frame several
+ // times.
+ int32_t mLast3AFrameNumber;
+
// Emit FaceDetection event to java if faces changed
void callbackFaceDetection(sp<Camera2Client> client,
const camera_frame_metadata &metadata);
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 8a4e75c..1e425ba 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -1047,6 +1047,11 @@
ALOGV_IF(quirks.meteringCropRegion, "Camera %d: Quirk meteringCropRegion"
" enabled", cameraId);
+ entry = info->find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+ quirks.partialResults = (entry.count != 0 && entry.data.u8[0] == 1);
+ ALOGV_IF(quirks.partialResults, "Camera %d: Quirk usePartialResult"
+ " enabled", cameraId);
+
return OK;
}
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index bcbdb99..93ab113 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -209,6 +209,7 @@
bool triggerAfWithAuto;
bool useZslFormat;
bool meteringCropRegion;
+ bool partialResults;
} quirks;
/**
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 52906ee..b2c9b33 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -145,6 +145,15 @@
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
+ // Quirks: Don't deliver partial results to listeners
+ entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
+ if (entry.count != 0 &&
+ entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ ALOGV("%s: Camera %d: Not forwarding partial result to listeners",
+ __FUNCTION__, device->getId());
+ return OK;
+ }
+
entry = frame.find(ANDROID_REQUEST_ID);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Error reading frame id",
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index c320d6c..cb72e0e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -55,6 +55,7 @@
mId(id),
mHal3Device(NULL),
mStatus(STATUS_UNINITIALIZED),
+ mUsePartialResultQuirk(false),
mNextResultFrameNumber(0),
mNextShutterFrameNumber(0),
mListener(NULL)
@@ -193,6 +194,15 @@
mNeedConfig = true;
mPauseStateNotify = false;
+ /** Check for quirks */
+
+ // Will the HAL be sending in early partial result metadata?
+ camera_metadata_entry partialResultsQuirk =
+ mDeviceInfo.find(ANDROID_QUIRKS_USE_PARTIAL_RESULT);
+ if (partialResultsQuirk.count > 0 && partialResultsQuirk.data.u8[0] == 1) {
+ mUsePartialResultQuirk = true;
+ }
+
return OK;
}
@@ -1391,6 +1401,168 @@
}
/**
+ * QUIRK(partial results)
+ * Check if all 3A fields are ready, and send off a partial 3A-only result
+ * to the output frame queue
+ */
+bool Camera3Device::processPartial3AQuirk(int32_t frameNumber,
+ const CameraMetadata& partial) {
+
+ // Check if all 3A states are present
+ // The full list of fields is
+ // android.control.afMode
+ // android.control.awbMode
+ // android.control.aeState
+ // android.control.awbState
+ // android.control.afState
+ // android.control.afTriggerID
+ // android.control.aePrecaptureID
+ // TODO: Add android.control.aeMode
+
+ bool gotAllStates = true;
+
+ uint8_t afMode;
+ uint8_t awbMode;
+ uint8_t aeState;
+ uint8_t afState;
+ uint8_t awbState;
+ int32_t afTriggerId;
+ int32_t aeTriggerId;
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_MODE,
+ &afMode, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_MODE,
+ &awbMode, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_STATE,
+ &aeState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_STATE,
+ &afState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_STATE,
+ &awbState, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &afTriggerId, frameNumber);
+
+ gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &aeTriggerId, frameNumber);
+
+ if (!gotAllStates) return false;
+
+ ALOGVV("%s: Camera %d: Frame %d: AF mode %d, AWB mode %d, "
+ "AF state %d, AE state %d, AWB state %d, "
+ "AF trigger %d, AE precapture trigger %d",
+ __FUNCTION__, mId, frameNumber,
+ afMode, awbMode,
+ afState, aeState, awbState,
+ afTriggerId, aeTriggerId);
+
+ // Got all states, so construct a minimal result to send
+ // In addition to the above fields, this means adding in
+ // android.request.frameCount
+ // android.quirks.partialResult
+
+ const size_t kMinimal3AResultEntries = 7;
+
+ Mutex::Autolock l(mOutputLock);
+
+ CameraMetadata& min3AResult =
+ *mResultQueue.insert(
+ mResultQueue.end(),
+ CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0));
+
+ if (!insert3AResult(min3AResult, ANDROID_REQUEST_FRAME_COUNT,
+ &frameNumber, frameNumber)) {
+ return false;
+ }
+
+ static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
+ if (!insert3AResult(min3AResult, ANDROID_QUIRKS_PARTIAL_RESULT,
+ &partialResult, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_MODE,
+ &afMode, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_MODE,
+ &awbMode, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_STATE,
+ &aeState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_STATE,
+ &afState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AWB_STATE,
+ &awbState, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AF_TRIGGER_ID,
+ &afTriggerId, frameNumber)) {
+ return false;
+ }
+
+ if (!insert3AResult(min3AResult, ANDROID_CONTROL_AE_PRECAPTURE_ID,
+ &aeTriggerId, frameNumber)) {
+ return false;
+ }
+
+ mResultSignal.signal();
+
+ return true;
+}
+
+template<typename T>
+bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber) {
+ (void) frameNumber;
+
+ camera_metadata_ro_entry_t entry;
+
+ entry = result.find(tag);
+ if (entry.count == 0) {
+ ALOGVV("%s: Camera %d: Frame %d: No %s provided by HAL!", __FUNCTION__,
+ mId, frameNumber, get_camera_metadata_tag_name(tag));
+ return false;
+ }
+
+ if (sizeof(T) == sizeof(uint8_t)) {
+ *value = entry.data.u8[0];
+ } else if (sizeof(T) == sizeof(int32_t)) {
+ *value = entry.data.i32[0];
+ } else {
+ ALOGE("%s: Unexpected type", __FUNCTION__);
+ return false;
+ }
+ return true;
+}
+
+template<typename T>
+bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
+ const T* value, int32_t frameNumber) {
+ if (result.update(tag, value, 1) != NO_ERROR) {
+ mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
+ SET_ERR("Frame %d: Failed to set %s in partial metadata",
+ frameNumber, get_camera_metadata_tag_name(tag));
+ return false;
+ }
+ return true;
+}
+
+/**
* Camera HAL device callback methods
*/
@@ -1405,6 +1577,8 @@
frameNumber);
return;
}
+ bool partialResultQuirk = false;
+ CameraMetadata collectedQuirkResult;
// Get capture timestamp from list of in-flight requests, where it was added
// by the shutter notification for this frame. Then update the in-flight
@@ -1420,24 +1594,57 @@
return;
}
InFlightRequest &request = mInFlightMap.editValueAt(idx);
+
+ // Check if this result carries only partial metadata
+ if (mUsePartialResultQuirk && result->result != NULL) {
+ camera_metadata_ro_entry_t partialResultEntry;
+ res = find_camera_metadata_ro_entry(result->result,
+ ANDROID_QUIRKS_PARTIAL_RESULT, &partialResultEntry);
+ if (res != NAME_NOT_FOUND &&
+ partialResultEntry.count > 0 &&
+ partialResultEntry.data.u8[0] ==
+ ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
+ // A partial result. Flag this as such, and collect this
+ // set of metadata into the in-flight entry.
+ partialResultQuirk = true;
+ request.partialResultQuirk.collectedResult.append(
+ result->result);
+ request.partialResultQuirk.collectedResult.erase(
+ ANDROID_QUIRKS_PARTIAL_RESULT);
+ // Fire off a 3A-only result if possible
+ if (!request.partialResultQuirk.haveSent3A) {
+ request.partialResultQuirk.haveSent3A =
+ processPartial3AQuirk(frameNumber,
+ request.partialResultQuirk.collectedResult);
+ }
+ }
+ }
+
timestamp = request.captureTimestamp;
/**
- * One of the following must happen before it's legal to call process_capture_result:
+ * One of the following must happen before it's legal to call process_capture_result,
+ * unless partial metadata is being provided:
* - CAMERA3_MSG_SHUTTER (expected during normal operation)
* - CAMERA3_MSG_ERROR (expected during flush)
*/
- if (request.requestStatus == OK && timestamp == 0) {
+ if (request.requestStatus == OK && timestamp == 0 && !partialResultQuirk) {
SET_ERR("Called before shutter notify for frame %d",
frameNumber);
return;
}
- if (result->result != NULL) {
+ // Did we get the (final) result metadata for this capture?
+ if (result->result != NULL && !partialResultQuirk) {
if (request.haveResultMetadata) {
SET_ERR("Called multiple times with metadata for frame %d",
frameNumber);
return;
}
+ if (mUsePartialResultQuirk &&
+ !request.partialResultQuirk.collectedResult.isEmpty()) {
+ collectedQuirkResult.acquire(
+ request.partialResultQuirk.collectedResult);
+ }
request.haveResultMetadata = true;
}
@@ -1449,6 +1656,7 @@
return;
}
+ // Check if everything has arrived for this result (buffers and metadata)
if (request.haveResultMetadata && request.numBuffersLeft == 0) {
ATRACE_ASYNC_END("frame capture", frameNumber);
mInFlightMap.removeItemsAt(idx, 1);
@@ -1463,9 +1671,12 @@
}
// Process the result metadata, if provided
- if (result->result != NULL) {
+ bool gotResult = false;
+ if (result->result != NULL && !partialResultQuirk) {
Mutex::Autolock l(mOutputLock);
+ gotResult = true;
+
if (frameNumber != mNextResultFrameNumber) {
SET_ERR("Out-of-order capture result metadata submitted! "
"(got frame number %d, expecting %d)",
@@ -1474,19 +1685,26 @@
}
mNextResultFrameNumber++;
- CameraMetadata &captureResult =
- *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
-
+ CameraMetadata captureResult;
captureResult = result->result;
+
if (captureResult.update(ANDROID_REQUEST_FRAME_COUNT,
(int32_t*)&frameNumber, 1) != OK) {
SET_ERR("Failed to set frame# in metadata (%d)",
frameNumber);
+ gotResult = false;
} else {
ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
__FUNCTION__, mId, frameNumber);
}
+ // Append any previous partials to form a complete result
+ if (mUsePartialResultQuirk && !collectedQuirkResult.isEmpty()) {
+ captureResult.append(collectedQuirkResult);
+ }
+
+ captureResult.sort();
+
// Check that there's a timestamp in the result metadata
camera_metadata_entry entry =
@@ -1494,10 +1712,19 @@
if (entry.count == 0) {
SET_ERR("No timestamp provided by HAL for frame %d!",
frameNumber);
+ gotResult = false;
} else if (timestamp != entry.data.i64[0]) {
SET_ERR("Timestamp mismatch between shutter notify and result"
" metadata for frame %d (%lld vs %lld respectively)",
frameNumber, timestamp, entry.data.i64[0]);
+ gotResult = false;
+ }
+
+ if (gotResult) {
+ // Valid result, insert into queue
+ CameraMetadata& queuedResult =
+ *mResultQueue.insert(mResultQueue.end(), CameraMetadata());
+ queuedResult.swap(captureResult);
}
} // scope for mOutputLock
@@ -1517,7 +1744,7 @@
// Finally, signal any waiters for new frames
- if (result->result != NULL) {
+ if (gotResult) {
mResultSignal.signal();
}
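Putting the Camera3Device pieces together: each in-flight request accumulates partial metadata as it arrives, fires a one-time 3A-only result as soon as all seven 3A tags are present, and folds everything collected into the final result before it is queued and signalled. A toy model of that bookkeeping (locking, the in-flight map, and error paths elided; it leans on the raw-buffer append() added at the top of this change):

```cpp
#include <camera/CameraMetadata.h>

using namespace android;

// Per-request state, mirroring PartialResultQuirkInFlight below.
struct PartialState {
    bool haveSent3A = false;
    CameraMetadata collectedResult;
};

// Called once per HAL metadata callback for a given frame; finalOut is
// queued by the caller when it ends up non-empty.
static void onResultMetadata(PartialState &req, bool isPartial,
                             const camera_metadata_t *metadata,
                             CameraMetadata *finalOut) {
    if (isPartial) {
        req.collectedResult.append(metadata);          // raw append()
        req.collectedResult.erase(ANDROID_QUIRKS_PARTIAL_RESULT);
        // processPartial3AQuirk() runs here in the real code, flipping
        // haveSent3A the first time all 3A tags are available.
        return;
    }
    // Final metadata: merge everything collected so far, then sort so the
    // combined buffer stays tag-ordered for later lookups.
    *finalOut = metadata;
    if (!req.collectedResult.isEmpty()) {
        finalOut->append(req.collectedResult);
    }
    finalOut->sort();
}
```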
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 12252c8..4a24a88 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -188,6 +188,9 @@
// Need to hold on to stream references until configure completes.
Vector<sp<camera3::Camera3StreamInterface> > mDeletedStreams;
+ // Whether quirk ANDROID_QUIRKS_USE_PARTIAL_RESULT is enabled
+ bool mUsePartialResultQuirk;
+
/**** End scope for mLock ****/
class CaptureRequest : public LightRefBase<CaptureRequest> {
@@ -445,6 +448,18 @@
// buffers
int numBuffersLeft;
+ // Fields used by the partial result quirk only
+ struct PartialResultQuirkInFlight {
+ // Set by process_capture_result once 3A has been sent to clients
+ bool haveSent3A;
+ // Result metadata collected so far, when partial results are in use
+ CameraMetadata collectedResult;
+
+ PartialResultQuirkInFlight():
+ haveSent3A(false) {
+ }
+ } partialResultQuirk;
+
// Default constructor needed by KeyedVector
InFlightRequest() :
requestId(0),
@@ -472,6 +487,21 @@
int32_t numBuffers);
/**
+ * For the partial result quirk, check if all 3A state fields are available
+ * and if so, queue up a 3A-only result to the client. Returns true if the
+ * 3A result was sent.
+ */
+ bool processPartial3AQuirk(int32_t frameNumber, const CameraMetadata& partial);
+
+ // Helpers for reading and writing 3A metadata to/from partial results
+ template<typename T>
+ bool get3AResult(const CameraMetadata& result, int32_t tag,
+ T* value, int32_t frameNumber);
+
+ template<typename T>
+ bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
+ int32_t frameNumber);
+ /**
* Tracking for idle detection
*/
sp<camera3::StatusTracker> mStatusTracker;