Merge "Camera2Client: avoid two consecutive sets of configure_streams calls" into lmp-dev
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 1c73995..eb5821b 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -151,7 +151,6 @@
 NuPlayer::NuPlayer()
     : mUIDValid(false),
       mSourceFlags(0),
-      mCurrentPositionUs(0),
       mVideoIsAVC(false),
       mOffloadAudio(false),
       mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
@@ -169,7 +168,6 @@
       mFlushingVideo(NONE),
       mSkipRenderingAudioUntilMediaTimeUs(-1ll),
       mSkipRenderingVideoUntilMediaTimeUs(-1ll),
-      mVideoLateByUs(0ll),
       mNumFramesTotal(0ll),
       mNumFramesDropped(0ll),
       mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
@@ -546,8 +544,11 @@
                     // the extractor may not yet be started and will assert.
                     // If the video decoder is not set (perhaps audio only in this case)
                     // do not perform a seek as it is not needed.
-                    mDeferredActions.push_back(
-                            new SeekAction(mCurrentPositionUs, false /* needNotify */));
+                    int64_t currentPositionUs = 0;
+                    if (getCurrentPosition(&currentPositionUs) == OK) {
+                        mDeferredActions.push_back(
+                                new SeekAction(currentPositionUs, false /* needNotify */));
+                    }
                 }
 
                 // If there is a new surface texture, instantiate decoders
@@ -581,7 +582,6 @@
             mVideoEOS = false;
             mSkipRenderingAudioUntilMediaTimeUs = -1;
             mSkipRenderingVideoUntilMediaTimeUs = -1;
-            mVideoLateByUs = 0;
             mNumFramesTotal = 0;
             mNumFramesDropped = 0;
             mStarted = true;
@@ -889,22 +889,6 @@
                         && (mVideoEOS || mVideoDecoder == NULL)) {
                     notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                 }
-            } else if (what == Renderer::kWhatPosition) {
-                int64_t positionUs;
-                CHECK(msg->findInt64("positionUs", &positionUs));
-                mCurrentPositionUs = positionUs;
-
-                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));
-
-                if (mDriver != NULL) {
-                    sp<NuPlayerDriver> driver = mDriver.promote();
-                    if (driver != NULL) {
-                        driver->notifyPosition(positionUs);
-
-                        driver->notifyFrameStats(
-                                mNumFramesTotal, mNumFramesDropped);
-                    }
-                }
             } else if (what == Renderer::kWhatFlushComplete) {
                 int32_t audio;
                 CHECK(msg->findInt32("audio", &audio));
@@ -1053,10 +1037,6 @@
         case FLUSHING_DECODER:
         {
             *state = FLUSHED;
-
-            if (!audio) {
-                mVideoLateByUs = 0;
-            }
             break;
         }
 
@@ -1066,7 +1046,6 @@
 
             ALOGV("initiating %s decoder shutdown", audio ? "audio" : "video");
             if (!audio) {
-                mVideoLateByUs = 0;
                 // Widevine source reads must stop before releasing the video decoder.
                 if (mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {
                     mSource->stop();
@@ -1487,7 +1466,7 @@
         dropAccessUnit = false;
         if (!audio
                 && !(mSourceFlags & Source::FLAG_SECURE)
-                && mVideoLateByUs > 100000ll
+                && mRenderer->getVideoLateByUs() > 100000ll
                 && mVideoIsAVC
                 && !IsAVCReferenceFrame(accessUnit)) {
             dropAccessUnit = true;
@@ -1822,6 +1801,20 @@
     return err;
 }
 
+status_t NuPlayer::getCurrentPosition(int64_t *mediaUs) {
+    sp<Renderer> renderer = mRenderer;
+    if (renderer == NULL) {
+        return NO_INIT;
+    }
+
+    return renderer->getCurrentPosition(mediaUs);
+}
+
+void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) {
+    *numFramesTotal = mNumFramesTotal;
+    *numFramesDropped = mNumFramesDropped;
+}
+
 sp<MetaData> NuPlayer::getFileMeta() {
     return mSource->getFileFormatMeta();
 }
@@ -1879,7 +1872,6 @@
     if (mDriver != NULL) {
         sp<NuPlayerDriver> driver = mDriver.promote();
         if (driver != NULL) {
-            driver->notifyPosition(seekTimeUs);
             if (needNotify) {
                 driver->notifySeekComplete();
             }
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 1b9a756..c61510c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -67,6 +67,8 @@
     status_t getTrackInfo(Parcel* reply) const;
     status_t getSelectedTrack(int32_t type, Parcel* reply) const;
     status_t selectTrack(size_t trackIndex, bool select);
+    status_t getCurrentPosition(int64_t *mediaUs);
+    void getStats(int64_t *mNumFramesTotal, int64_t *mNumFramesDropped);
 
     sp<MetaData> getFileMeta();
 
@@ -126,7 +128,6 @@
     sp<Source> mSource;
     uint32_t mSourceFlags;
     sp<NativeWindowWrapper> mNativeWindow;
-    int64_t mCurrentPositionUs;
     sp<MediaPlayerBase::AudioSink> mAudioSink;
     sp<Decoder> mVideoDecoder;
     bool mVideoIsAVC;
@@ -179,7 +180,6 @@
     int64_t mSkipRenderingAudioUntilMediaTimeUs;
     int64_t mSkipRenderingVideoUntilMediaTimeUs;
 
-    int64_t mVideoLateByUs;
     int64_t mNumFramesTotal, mNumFramesDropped;
 
     int32_t mVideoScalingMode;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index c57955d..ab46074 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,6 +26,7 @@
 
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MetaData.h>
 #include <media/stagefright/Utils.h>
 
@@ -38,10 +39,7 @@
       mSetSurfaceInProgress(false),
       mDurationUs(-1),
       mPositionUs(-1),
-      mNotifyTimeRealUs(-1),
-      mPauseStartedTimeUs(-1),
-      mNumFramesTotal(0),
-      mNumFramesDropped(0),
+      mSeekInProgress(false),
       mLooper(new ALooper),
       mPlayerFlags(0),
       mAtEOS(false),
@@ -191,7 +189,7 @@
             mAtEOS = false;
             mState = STATE_STOPPED_AND_PREPARING;
             mIsAsyncPrepare = false;
-            mPlayer->seekToAsync(0);
+            mPlayer->seekToAsync(0, true /* needNotify */);
             while (mState == STATE_STOPPED_AND_PREPARING) {
                 mCondition.wait(mLock);
             }
@@ -216,7 +214,7 @@
             mAtEOS = false;
             mState = STATE_STOPPED_AND_PREPARING;
             mIsAsyncPrepare = true;
-            mPlayer->seekToAsync(0);
+            mPlayer->seekToAsync(0, true /* needNotify */);
             return OK;
         default:
             return INVALID_OPERATION;
@@ -276,14 +274,6 @@
                 mPositionUs = -1;
             } else {
                 mPlayer->resume();
-                if (mNotifyTimeRealUs != -1) {
-                    // Pause time must be set if here by setPauseStartedTimeIfNeeded().
-                    //CHECK(mPauseStartedTimeUs != -1);
-
-                    // if no seek occurs, adjust our notify time so that getCurrentPosition()
-                    // is continuous if read immediately after calling start().
-                    mNotifyTimeRealUs += ALooper::GetNowUs() - mPauseStartedTimeUs;
-                }
             }
             break;
         }
@@ -293,7 +283,6 @@
     }
 
     mState = STATE_RUNNING;
-    mPauseStartedTimeUs = -1;
 
     return OK;
 }
@@ -322,7 +311,6 @@
         default:
             return INVALID_OPERATION;
     }
-    setPauseStartedTimeIfNeeded();
 
     return OK;
 }
@@ -336,7 +324,6 @@
             return OK;
 
         case STATE_RUNNING:
-            setPauseStartedTimeIfNeeded();
             mState = STATE_PAUSED;
             notifyListener_l(MEDIA_PAUSED);
             mPlayer->pause();
@@ -374,6 +361,7 @@
         case STATE_PAUSED:
         {
             mAtEOS = false;
+            mSeekInProgress = true;
             // seeks can take a while, so we essentially paused
             notifyListener_l(MEDIA_PAUSED);
             mPlayer->seekToAsync(seekTimeUs, true /* needNotify */);
@@ -385,44 +373,23 @@
     }
 
     mPositionUs = seekTimeUs;
-    mNotifyTimeRealUs = -1;
     return OK;
 }
 
 status_t NuPlayerDriver::getCurrentPosition(int *msec) {
+    int64_t tempUs = 0;
+    status_t ret = mPlayer->getCurrentPosition(&tempUs);
+
     Mutex::Autolock autoLock(mLock);
-
-    if (mPositionUs < 0) {
-        // mPositionUs is the media time.
-        // It is negative under these cases
-        // (1) == -1 after reset, or very first playback, no stream notification yet.
-        // (2) == -1 start after end of stream, no stream notification yet.
-        // (3) == large negative # after ~292,471 years of continuous playback.
-
-        //CHECK_EQ(mPositionUs, -1);
-        *msec = 0;
-    } else if (mNotifyTimeRealUs == -1) {
-        // A seek has occurred just occurred, no stream notification yet.
-        // mPositionUs (>= 0) is the new media position.
-        *msec = mPositionUs / 1000;
+    // We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
+    // means getCurrentPosition can be called before the seek is completed. In other words, the
+    // renderer may return a position value that's different from the seek-to position.
+    if (ret != OK || mSeekInProgress) {
+        tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
     } else {
-        // mPosition must be valid (i.e. >= 0) by the first check above.
-        // We're either playing or have pause time set: mPauseStartedTimeUs is >= 0
-        //LOG_ALWAYS_FATAL_IF(
-        //        !isPlaying() && mPauseStartedTimeUs < 0,
-        //        "Player in non-playing mState(%d) and mPauseStartedTimeUs(%lld) < 0",
-        //        mState, (long long)mPauseStartedTimeUs);
-        ALOG_ASSERT(mNotifyTimeRealUs >= 0);
-        int64_t nowUs =
-                (isPlaying() ?  ALooper::GetNowUs() : mPauseStartedTimeUs);
-        *msec = (mPositionUs + nowUs - mNotifyTimeRealUs + 500ll) / 1000;
-        // It is possible for *msec to be negative if the media position is > 596 hours.
-        // but we turn on this checking in NDEBUG == 0 mode.
-        ALOG_ASSERT(*msec >= 0);
-        ALOGV("getCurrentPosition nowUs(%lld)", (long long)nowUs);
+        mPositionUs = tempUs;
     }
-    ALOGV("getCurrentPosition returning(%d) mPositionUs(%lld) mNotifyRealTimeUs(%lld)",
-            *msec, (long long)mPositionUs, (long long)mNotifyTimeRealUs);
+    *msec = (int)divRound(tempUs, (int64_t)(1000));
     return OK;
 }
 
@@ -605,17 +572,10 @@
     mDurationUs = durationUs;
 }
 
-void NuPlayerDriver::notifyPosition(int64_t positionUs) {
-    Mutex::Autolock autoLock(mLock);
-    if (isPlaying()) {
-        mPositionUs = positionUs;
-        mNotifyTimeRealUs = ALooper::GetNowUs();
-    }
-}
-
 void NuPlayerDriver::notifySeekComplete() {
     ALOGV("notifySeekComplete(%p)", this);
     Mutex::Autolock autoLock(mLock);
+    mSeekInProgress = false;
     notifySeekComplete_l();
 }
 
@@ -636,26 +596,21 @@
     notifyListener_l(wasSeeking ? MEDIA_SEEK_COMPLETE : MEDIA_PREPARED);
 }
 
-void NuPlayerDriver::notifyFrameStats(
-        int64_t numFramesTotal, int64_t numFramesDropped) {
-    Mutex::Autolock autoLock(mLock);
-    mNumFramesTotal = numFramesTotal;
-    mNumFramesDropped = numFramesDropped;
-}
-
 status_t NuPlayerDriver::dump(
         int fd, const Vector<String16> & /* args */) const {
-    Mutex::Autolock autoLock(mLock);
+    int64_t numFramesTotal;
+    int64_t numFramesDropped;
+    mPlayer->getStats(&numFramesTotal, &numFramesDropped);
 
     FILE *out = fdopen(dup(fd), "w");
 
     fprintf(out, " NuPlayer\n");
     fprintf(out, "  numFramesTotal(%" PRId64 "), numFramesDropped(%" PRId64 "), "
                  "percentageDropped(%.2f)\n",
-                 mNumFramesTotal,
-                 mNumFramesDropped,
-                 mNumFramesTotal == 0
-                    ? 0.0 : (double)mNumFramesDropped / mNumFramesTotal);
+                 numFramesTotal,
+                 numFramesDropped,
+                 numFramesTotal == 0
+                    ? 0.0 : (double)numFramesDropped / numFramesTotal);
 
     fclose(out);
     out = NULL;
@@ -690,7 +645,6 @@
         case MEDIA_ERROR:
         {
             mAtEOS = true;
-            setPauseStartedTimeIfNeeded();
             break;
         }
 
@@ -758,10 +712,4 @@
     mPlayerFlags = flags;
 }
 
-void NuPlayerDriver::setPauseStartedTimeIfNeeded() {
-    if (mPauseStartedTimeUs == -1) {
-        mPauseStartedTimeUs = ALooper::GetNowUs();
-    }
-}
-
 }  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index f2bd431..5cba7d9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -68,10 +68,8 @@
     void notifyResetComplete();
     void notifySetSurfaceComplete();
     void notifyDuration(int64_t durationUs);
-    void notifyPosition(int64_t positionUs);
     void notifySeekComplete();
     void notifySeekComplete_l();
-    void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
     void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
     void notifyFlagsChanged(uint32_t flags);
 
@@ -106,10 +104,7 @@
     bool mSetSurfaceInProgress;
     int64_t mDurationUs;
     int64_t mPositionUs;
-    int64_t mNotifyTimeRealUs;
-    int64_t mPauseStartedTimeUs;
-    int64_t mNumFramesTotal;
-    int64_t mNumFramesDropped;
+    bool mSeekInProgress;
     // <<<
 
     sp<ALooper> mLooper;
@@ -125,7 +120,6 @@
 
     status_t prepare_l();
     void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
-    void setPauseStartedTimeIfNeeded();
 
     DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
 };
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index e5c64f6..7b9dbb7 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -25,6 +25,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 
@@ -63,22 +64,21 @@
       mDrainVideoQueuePending(false),
       mAudioQueueGeneration(0),
       mVideoQueueGeneration(0),
-      mFirstAnchorTimeMediaUs(-1),
-      mAnchorTimeMediaUs(-1),
-      mAnchorTimeRealUs(-1),
-      mFlushingAudio(false),
-      mFlushingVideo(false),
+      mAudioFirstAnchorTimeMediaUs(-1),
+      mVideoAnchorTimeMediaUs(-1),
+      mVideoAnchorTimeRealUs(-1),
+      mVideoLateByUs(0ll),
       mHasAudio(false),
       mHasVideo(false),
+      mPauseStartedTimeRealUs(-1),
+      mFlushingAudio(false),
+      mFlushingVideo(false),
       mSyncQueues(false),
       mPaused(false),
-      mPauseStartedTimeRealUs(-1),
       mVideoSampleReceived(false),
       mVideoRenderingStarted(false),
       mVideoRenderingStartGeneration(0),
       mAudioRenderingStartGeneration(0),
-      mLastPositionUpdateUs(-1ll),
-      mVideoLateByUs(0ll),
       mAudioOffloadPauseTimeoutGeneration(0),
       mAudioOffloadTornDown(false) {
     readProperties();
@@ -137,9 +137,9 @@
     Mutex::Autolock autoLock(mLock);
     // CHECK(mAudioQueue.empty());
     // CHECK(mVideoQueue.empty());
-    mFirstAnchorTimeMediaUs = -1;
-    mAnchorTimeMediaUs = -1;
-    mAnchorTimeRealUs = -1;
+    setAudioFirstAnchorTime(-1);
+    setVideoAnchorTime(-1, -1);
+    setVideoLateByUs(0);
     mSyncQueues = false;
 }
 
@@ -165,6 +165,78 @@
     msg->post();
 }
 
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
+    return getCurrentPosition(mediaUs, ALooper::GetNowUs());
+}
+
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs, int64_t nowUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    if (!mHasAudio && !mHasVideo) {
+        return NO_INIT;
+    }
+
+    int64_t positionUs = 0;
+    if (!mHasAudio) {
+        if (mVideoAnchorTimeMediaUs < 0) {
+            return NO_INIT;
+        }
+        positionUs = (nowUs - mVideoAnchorTimeRealUs) + mVideoAnchorTimeMediaUs;
+
+        if (mPauseStartedTimeRealUs != -1) {
+            positionUs -= (nowUs - mPauseStartedTimeRealUs);
+        }
+    } else {
+        if (mAudioFirstAnchorTimeMediaUs < 0) {
+            return NO_INIT;
+        }
+        positionUs = mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
+    }
+    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
+    return OK;
+}
+
+void NuPlayer::Renderer::setHasMedia(bool audio) {
+    Mutex::Autolock autoLock(mTimeLock);
+    if (audio) {
+        mHasAudio = true;
+    } else {
+        mHasVideo = true;
+    }
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    mAudioFirstAnchorTimeMediaUs = mediaUs;
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    if (mAudioFirstAnchorTimeMediaUs == -1) {
+        mAudioFirstAnchorTimeMediaUs = mediaUs;
+    }
+}
+
+void NuPlayer::Renderer::setVideoAnchorTime(int64_t mediaUs, int64_t realUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    mVideoAnchorTimeMediaUs = mediaUs;
+    mVideoAnchorTimeRealUs = realUs;
+}
+
+void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    mVideoLateByUs = lateUs;
+}
+
+int64_t NuPlayer::Renderer::getVideoLateByUs() {
+    Mutex::Autolock autoLock(mTimeLock);
+    return mVideoLateByUs;
+}
+
+void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
+    Mutex::Autolock autoLock(mTimeLock);
+    mPauseStartedTimeRealUs = realUs;
+}
+
 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatStopAudioSink:
@@ -388,16 +460,7 @@
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
             ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-            if (mFirstAnchorTimeMediaUs == -1) {
-                mFirstAnchorTimeMediaUs = mediaTimeUs;
-            }
-
-            int64_t nowUs = ALooper::GetNowUs();
-            mAnchorTimeMediaUs =
-                mFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs);
-            mAnchorTimeRealUs = nowUs;
-
-            notifyPosition();
+            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
         }
 
         size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -468,15 +531,8 @@
             int64_t mediaTimeUs;
             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
             ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-            if (mFirstAnchorTimeMediaUs == -1) {
-                mFirstAnchorTimeMediaUs = mediaTimeUs;
-            }
-            mAnchorTimeMediaUs = mediaTimeUs;
 
-            int64_t nowUs = ALooper::GetNowUs();
-            mAnchorTimeRealUs = nowUs + getPendingAudioPlayoutDurationUs(nowUs);
-
-            notifyPosition();
+            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
         }
 
         size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -534,6 +590,14 @@
     return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
 }
 
+int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
+    int64_t currentPositionUs;
+    if (getCurrentPosition(&currentPositionUs, nowUs) != OK) {
+        currentPositionUs = 0;
+    }
+    return (mediaTimeUs - currentPositionUs) + nowUs;
+}
+
 void NuPlayer::Renderer::postDrainVideoQueue() {
     if (mDrainVideoQueuePending
             || mSyncQueues
@@ -568,21 +632,11 @@
         int64_t mediaTimeUs;
         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
 
-        if (mFirstAnchorTimeMediaUs == -1 && !mHasAudio) {
-            mFirstAnchorTimeMediaUs = mediaTimeUs;
-        }
-        if (mAnchorTimeMediaUs < 0) {
-            if (!mHasAudio) {
-                mAnchorTimeMediaUs = mediaTimeUs;
-                mAnchorTimeRealUs = nowUs;
-                if (!mPaused || mVideoSampleReceived) {
-                    notifyPosition();
-                }
-            }
+        if (mVideoAnchorTimeMediaUs < 0) {
+            setVideoAnchorTime(mediaTimeUs, nowUs);
             realTimeUs = nowUs;
         } else {
-            realTimeUs =
-                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
+            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
         }
     }
 
@@ -618,10 +672,11 @@
         mVideoQueue.erase(mVideoQueue.begin());
         entry = NULL;
 
-        mVideoLateByUs = 0ll;
+        setVideoLateByUs(0);
         return;
     }
 
+    int64_t nowUs = -1;
     int64_t realTimeUs;
     if (mFlags & FLAG_REAL_TIME) {
         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
@@ -629,13 +684,17 @@
         int64_t mediaTimeUs;
         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
 
-        realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
+        nowUs = ALooper::GetNowUs();
+        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
     }
 
     bool tooLate = false;
 
     if (!mPaused) {
-        mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
+        if (nowUs == -1) {
+            nowUs = ALooper::GetNowUs();
+        }
+        setVideoLateByUs(nowUs - realTimeUs);
         tooLate = (mVideoLateByUs > 40000);
 
         if (tooLate) {
@@ -644,13 +703,14 @@
         } else {
             ALOGV("rendering video at media time %.2f secs",
                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
-                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
+                    (realTimeUs + mVideoAnchorTimeMediaUs - mVideoAnchorTimeRealUs)) / 1E6);
         }
     } else {
-        mVideoLateByUs = 0ll;
-        if (!mHasAudio && !mVideoSampleReceived) {
-            mAnchorTimeMediaUs = -1;
-            mAnchorTimeRealUs = -1;
+        setVideoLateByUs(0);
+        if (!mVideoSampleReceived) {
+            // This will ensure that the first frame after a flush won't be used as anchor
+            // when renderer is in paused state, because resume can happen any time after seek.
+            setVideoAnchorTime(-1, -1);
         }
     }
 
@@ -693,10 +753,9 @@
     int32_t audio;
     CHECK(msg->findInt32("audio", &audio));
 
-    if (audio) {
-        mHasAudio = true;
-    } else {
-        mHasVideo = true;
+    setHasMedia(audio);
+
+    if (mHasVideo) {
         if (mVideoScheduler == NULL) {
             mVideoScheduler = new VideoFrameScheduler();
             mVideoScheduler->init();
@@ -837,9 +896,7 @@
     {
          Mutex::Autolock autoLock(mLock);
          syncQueuesDone_l();
-         if (!mHasAudio) {
-             mPauseStartedTimeRealUs = -1;
-         }
+         setPauseStartedTimeRealUs(-1);
     }
 
     ALOGV("flushing %s", audio ? "audio" : "video");
@@ -852,7 +909,7 @@
             prepareForMediaRenderingStart();
 
             if (offloadingAudio()) {
-                mFirstAnchorTimeMediaUs = -1;
+                setAudioFirstAnchorTime(-1);
             }
         }
 
@@ -943,42 +1000,6 @@
     ++mAudioQueueGeneration;
 }
 
-void NuPlayer::Renderer::notifyPosition() {
-    // notifyPosition() must be called only after setting mAnchorTimeRealUs
-    // and mAnchorTimeMediaUs, and must not be paused as it extrapolates position.
-    //CHECK_GE(mAnchorTimeRealUs, 0);
-    //CHECK_GE(mAnchorTimeMediaUs, 0);
-    //CHECK(!mPaused || !mHasAudio);  // video-only does display in paused mode.
-
-    int64_t nowUs = ALooper::GetNowUs();
-
-    if (mLastPositionUpdateUs >= 0
-            && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
-        return;
-    }
-    mLastPositionUpdateUs = nowUs;
-
-    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
-
-    //ALOGD("notifyPosition: positionUs(%lld) nowUs(%lld) mAnchorTimeRealUs(%lld)"
-    //        " mAnchorTimeMediaUs(%lld) mFirstAnchorTimeMediaUs(%lld)",
-    //        (long long)positionUs, (long long)nowUs, (long long)mAnchorTimeRealUs,
-    //        (long long)mAnchorTimeMediaUs, (long long)mFirstAnchorTimeMediaUs);
-
-    // Due to adding the latency to mAnchorTimeRealUs in onDrainAudioQueue(),
-    // positionUs may be less than the first media time.  This is avoided
-    // here to prevent potential retrograde motion of the position bar
-    // when starting up after a seek.
-    if (positionUs < mFirstAnchorTimeMediaUs) {
-        positionUs = mFirstAnchorTimeMediaUs;
-    }
-    sp<AMessage> notify = mNotify->dup();
-    notify->setInt32("what", kWhatPosition);
-    notify->setInt64("positionUs", positionUs);
-    notify->setInt64("videoLateByUs", mVideoLateByUs);
-    notify->post();
-}
-
 void NuPlayer::Renderer::onPause() {
     if (mPaused) {
         ALOGW("Renderer::onPause() called while already paused!");
@@ -990,9 +1011,7 @@
         ++mVideoQueueGeneration;
         prepareForMediaRenderingStart();
         mPaused = true;
-        if (!mHasAudio) {
-            mPauseStartedTimeRealUs = ALooper::GetNowUs();
-        }
+        setPauseStartedTimeRealUs(ALooper::GetNowUs());
     }
 
     mDrainAudioQueuePending = false;
@@ -1021,9 +1040,11 @@
 
     Mutex::Autolock autoLock(mLock);
     mPaused = false;
-    if (!mHasAudio && mPauseStartedTimeRealUs != -1) {
-        mAnchorTimeRealUs += ALooper::GetNowUs() - mPauseStartedTimeRealUs;
-        mPauseStartedTimeRealUs = -1;
+    if (mPauseStartedTimeRealUs != -1) {
+        int64_t newAnchorRealUs =
+            mVideoAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
+        setVideoAnchorTime(mVideoAnchorTimeMediaUs, newAnchorRealUs);
+        setPauseStartedTimeRealUs(-1);
     }
 
     if (!mAudioQueue.empty()) {
@@ -1045,7 +1066,6 @@
 // TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
 // as it acquires locks and may query the audio driver.
 //
-// Some calls are not needed since notifyPosition() doesn't always deliver a message.
 // Some calls could conceivably retrieve extrapolated data instead of
 // accessing getTimestamp() or getPosition() every time a data buffer with
 // a media time is received.
@@ -1113,15 +1133,11 @@
     }
     mAudioOffloadTornDown = true;
 
-    int64_t firstAudioTimeUs;
-    {
-        Mutex::Autolock autoLock(mLock);
-        firstAudioTimeUs = mFirstAnchorTimeMediaUs;
+    int64_t currentPositionUs;
+    if (getCurrentPosition(&currentPositionUs) != OK) {
+        currentPositionUs = 0;
     }
 
-    int64_t currentPositionUs =
-        firstAudioTimeUs + getPlayedOutAudioDurationUs(ALooper::GetNowUs());
-
     mAudioSink->stop();
     mAudioSink->flush();
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index d27c238..db1dab6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -59,6 +59,17 @@
 
     void setVideoFrameRate(float fps);
 
+    // Following setters and getters are protected by mTimeLock.
+    status_t getCurrentPosition(int64_t *mediaUs);
+    status_t getCurrentPosition(int64_t *mediaUs, int64_t nowUs);
+    void setHasMedia(bool audio);
+    void setAudioFirstAnchorTime(int64_t mediaUs);
+    void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
+    void setVideoAnchorTime(int64_t mediaUs, int64_t realUs);
+    void setVideoLateByUs(int64_t lateUs);
+    int64_t getVideoLateByUs();
+    void setPauseStartedTimeRealUs(int64_t realUs);
+
     enum {
         kWhatEOS                 = 'eos ',
         kWhatFlushComplete       = 'fluC',
@@ -117,27 +128,33 @@
     int32_t mAudioQueueGeneration;
     int32_t mVideoQueueGeneration;
 
-    int64_t mFirstAnchorTimeMediaUs;
-    int64_t mAnchorTimeMediaUs;
-    int64_t mAnchorTimeRealUs;
+    Mutex mTimeLock;
+    // |mTimeLock| protects the following 7 member vars that are related to time.
+    // Note: those members are only written on Renderer thread, so reading on Renderer thread
+    // doesn't need to be protected. Otherwise accessing those members must be protected by
+    // |mTimeLock|.
+    // TODO: move those members to a separate media clock class.
+    int64_t mAudioFirstAnchorTimeMediaUs;
+    int64_t mVideoAnchorTimeMediaUs;
+    int64_t mVideoAnchorTimeRealUs;
+    int64_t mVideoLateByUs;
+    bool mHasAudio;
+    bool mHasVideo;
+    int64_t mPauseStartedTimeRealUs;
 
     Mutex mFlushLock;  // protects the following 2 member vars.
     bool mFlushingAudio;
     bool mFlushingVideo;
 
-    bool mHasAudio;
-    bool mHasVideo;
     bool mSyncQueues;
 
     bool mPaused;
-    int64_t mPauseStartedTimeRealUs;
     bool mVideoSampleReceived;
     bool mVideoRenderingStarted;
     int32_t mVideoRenderingStartGeneration;
     int32_t mAudioRenderingStartGeneration;
 
     int64_t mLastPositionUpdateUs;
-    int64_t mVideoLateByUs;
 
     int32_t mAudioOffloadPauseTimeoutGeneration;
     bool mAudioOffloadTornDown;
@@ -149,6 +166,8 @@
     int64_t getPlayedOutAudioDurationUs(int64_t nowUs);
     void postDrainAudioQueue_l(int64_t delayUs = 0);
 
+    int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
+
     void onDrainVideoQueue();
     void postDrainVideoQueue();
 
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9b11ded..2048808 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -3200,12 +3200,20 @@
                                         sizeof(describeParams.sMediaImage)));
                     }
 
+                    if (portIndex != kPortIndexOutput) {
+                        // TODO: also get input crop
+                        break;
+                    }
+
                     OMX_CONFIG_RECTTYPE rect;
                     InitOMXParams(&rect);
-                    rect.nPortIndex = kPortIndexOutput;
+                    rect.nPortIndex = portIndex;
 
                     if (mOMX->getConfig(
-                                mNode, OMX_IndexConfigCommonOutputCrop,
+                                mNode,
+                                (portIndex == kPortIndexOutput ?
+                                        OMX_IndexConfigCommonOutputCrop :
+                                        OMX_IndexConfigCommonInputCrop),
                                 &rect, sizeof(rect)) != OK) {
                         rect.nLeft = 0;
                         rect.nTop = 0;
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 0f4a00d..ed3dca0 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -111,36 +111,6 @@
     return BAD_VALUE;
 }
 
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
-        uint8_t *inyuv, uint8_t* outyuv,
-        int32_t width, int32_t height) {
-
-    int32_t outYsize = width * height;
-    uint32_t *outy =  (uint32_t *) outyuv;
-    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
-    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
-    /* Y copying */
-    memcpy(outy, inyuv, outYsize);
-
-    /* U & V copying */
-    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
-    for (int32_t i = height >> 1; i > 0; --i) {
-        for (int32_t j = width >> 2; j > 0; --j) {
-            uint32_t temp = *inyuv_4++;
-            uint32_t tempU = temp & 0xFF;
-            tempU = tempU | ((temp >> 8) & 0xFF00);
-
-            uint32_t tempV = (temp >> 8) & 0xFF;
-            tempV = tempV | ((temp >> 16) & 0xFF00);
-
-            // Flip U and V
-            *outcb++ = tempV;
-            *outcr++ = tempU;
-        }
-    }
-}
-
 static void* MallocWrapper(
         void * /* userData */, int32_t size, int32_t /* attrs */) {
     void *ptr = malloc(size);
@@ -178,7 +148,7 @@
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
       mVideoWidth(176),
       mVideoHeight(144),
       mVideoFrameRate(30),
@@ -260,9 +230,10 @@
 
     mEncParams->use_overrun_buffer = AVC_OFF;
 
-    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
+            || mStoreMetaDataInBuffers) {
         // Color conversion is needed.
-        CHECK(mInputFrameData == NULL);
+        free(mInputFrameData);
         mInputFrameData =
             (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
         CHECK(mInputFrameData != NULL);
@@ -348,10 +319,10 @@
     PVAVCCleanUpEncoder(mHandle);
     releaseOutputBuffers();
 
-    delete mInputFrameData;
+    free(mInputFrameData);
     mInputFrameData = NULL;
 
-    delete mSliceGroup;
+    free(mSliceGroup);
     mSliceGroup = NULL;
 
     delete mEncParams;
@@ -713,11 +684,7 @@
                     mStoreMetaDataInBuffers ? " true" : "false");
 
             if (mStoreMetaDataInBuffers) {
-                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
-                if (mInputFrameData == NULL) {
-                    mInputFrameData =
-                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
-                }
+                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
             }
 
             return OMX_ErrorNone;
@@ -801,8 +768,6 @@
             }
         }
 
-        buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
-
         // Get next input video frame
         if (mReadyForNextFrame) {
             // Save the input buffer info so that it can be
@@ -823,7 +788,7 @@
                 videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
                 videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                 videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
-                uint8_t *inputData = NULL;
+                const uint8_t *inputData = NULL;
                 if (mStoreMetaDataInBuffers) {
                     if (inHeader->nFilledLen != 8) {
                         ALOGE("MetaData buffer is wrong size! "
@@ -833,8 +798,10 @@
                         return;
                     }
                     inputData =
-                            extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
-                                    &srcBuffer);
+                        extractGraphicBuffer(
+                                mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+                                inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+                                mVideoWidth, mVideoHeight);
                     if (inputData == NULL) {
                         ALOGE("Unable to extract gralloc buffer in metadata mode");
                         mSignalledError = true;
@@ -843,16 +810,16 @@
                     }
                     // TODO: Verify/convert pixel format enum
                 } else {
-                    inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+                    inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+                    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                        ConvertYUV420SemiPlanarToYUV420Planar(
+                            inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+                        inputData = mInputFrameData;
+                    }
                 }
 
-                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
-                    ConvertYUV420SemiPlanarToYUV420Planar(
-                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
-                    inputData = mInputFrameData;
-                }
                 CHECK(inputData != NULL);
-                videoInput.YCbCr[0] = inputData;
+                videoInput.YCbCr[0] = (uint8_t *)inputData;
                 videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                 videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                     ((videoInput.height * videoInput.pitch) >> 2);
@@ -869,14 +836,12 @@
                     if (encoderStatus < AVCENC_SUCCESS) {
                         ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                         mSignalledError = true;
-                        releaseGrallocData(srcBuffer);
                         notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                         return;
                     } else {
                         ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                         inQueue.erase(inQueue.begin());
                         inInfo->mOwnedByUs = false;
-                        releaseGrallocData(srcBuffer);
                         notifyEmptyBufferDone(inHeader);
                         return;
                     }
@@ -916,7 +881,6 @@
             if (encoderStatus < AVCENC_SUCCESS) {
                 ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                 mSignalledError = true;
-                releaseGrallocData(srcBuffer);
                 notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                 return;
             }
@@ -926,7 +890,6 @@
 
         inQueue.erase(inQueue.begin());
         inInfo->mOwnedByUs = false;
-        releaseGrallocData(srcBuffer);
         notifyEmptyBufferDone(inHeader);
 
         outQueue.erase(outQueue.begin());
@@ -974,47 +937,6 @@
     ALOGV("signalBufferReturned: %p", buffer);
 }
 
-OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
-        const char *name, OMX_INDEXTYPE *index) {
-    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
-        *(int32_t*)index = kStoreMetaDataExtensionIndex;
-        return OMX_ErrorNone;
-    }
-    return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
-    OMX_U32 type = *(OMX_U32*)data;
-    status_t res;
-    if (type != kMetadataBufferTypeGrallocSource) {
-        ALOGE("Data passed in with metadata mode does not have type "
-                "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
-                kMetadataBufferTypeGrallocSource, type);
-        return NULL;
-    }
-    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
-    const Rect rect(mVideoWidth, mVideoHeight);
-    uint8_t *img;
-    res = GraphicBufferMapper::get().lock(imgBuffer,
-            GRALLOC_USAGE_HW_VIDEO_ENCODER,
-            rect, (void**)&img);
-    if (res != OK) {
-        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
-                imgBuffer);
-        return NULL;
-    }
-
-    *buffer = imgBuffer;
-    return img;
-}
-
-void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
-    if (mStoreMetaDataInBuffers) {
-        GraphicBufferMapper::get().unlock(buffer);
-    }
-}
-
 }  // namespace android
 
 android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index cfa9ca5..130593f 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -22,14 +22,14 @@
 #include <utils/Vector.h>
 
 #include "avcenc_api.h"
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
 
 namespace android {
 
 struct MediaBuffer;
 
 struct SoftAVCEncoder : public MediaBufferObserver,
-                        public SimpleSoftOMXComponent {
+                        public SoftVideoEncoderOMXComponent {
     SoftAVCEncoder(
             const char *name,
             const OMX_CALLBACKTYPE *callbacks,
@@ -45,11 +45,6 @@
 
     virtual void onQueueFilled(OMX_U32 portIndex);
 
-    // Override SoftOMXComponent methods
-
-    virtual OMX_ERRORTYPE getExtensionIndex(
-            const char *name, OMX_INDEXTYPE *index);
-
     // Implement MediaBufferObserver
     virtual void signalBufferReturned(MediaBuffer *buffer);
 
@@ -105,9 +100,6 @@
     OMX_ERRORTYPE releaseEncoder();
     void releaseOutputBuffers();
 
-    uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
-    void releaseGrallocData(buffer_handle_t buffer);
-
     DISALLOW_EVIL_CONSTRUCTORS(SoftAVCEncoder);
 };
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 42c9956..c87d19c 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -46,42 +46,12 @@
     params->nVersion.s.nStep = 0;
 }
 
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
-        uint8_t *inyuv, uint8_t* outyuv,
-        int32_t width, int32_t height) {
-
-    int32_t outYsize = width * height;
-    uint32_t *outy =  (uint32_t *) outyuv;
-    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
-    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
-    /* Y copying */
-    memcpy(outy, inyuv, outYsize);
-
-    /* U & V copying */
-    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
-    for (int32_t i = height >> 1; i > 0; --i) {
-        for (int32_t j = width >> 2; j > 0; --j) {
-            uint32_t temp = *inyuv_4++;
-            uint32_t tempU = temp & 0xFF;
-            tempU = tempU | ((temp >> 8) & 0xFF00);
-
-            uint32_t tempV = (temp >> 8) & 0xFF;
-            tempV = tempV | ((temp >> 16) & 0xFF00);
-
-            // Flip U and V
-            *outcb++ = tempV;
-            *outcr++ = tempU;
-        }
-    }
-}
-
 SoftMPEG4Encoder::SoftMPEG4Encoder(
             const char *name,
             const OMX_CALLBACKTYPE *callbacks,
             OMX_PTR appData,
             OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
       mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
       mVideoWidth(176),
       mVideoHeight(144),
@@ -149,9 +119,10 @@
     mEncParams->quantType[0] = 0;
     mEncParams->noFrameSkipped = PV_OFF;
 
-    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
+            || mStoreMetaDataInBuffers) {
         // Color conversion is needed.
-        CHECK(mInputFrameData == NULL);
+        free(mInputFrameData);
         mInputFrameData =
             (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
         CHECK(mInputFrameData != NULL);
@@ -216,7 +187,7 @@
 
     PVCleanUpVideoEncoder(mHandle);
 
-    delete mInputFrameData;
+    free(mInputFrameData);
     mInputFrameData = NULL;
 
     delete mEncParams;
@@ -486,6 +457,17 @@
                 mVideoHeight = def->format.video.nFrameHeight;
                 mVideoFrameRate = def->format.video.xFramerate >> 16;
                 mVideoColorFormat = def->format.video.eColorFormat;
+
+                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
+                    &editPortInfo(0)->mDef;
+                portDef->format.video.nFrameWidth = mVideoWidth;
+                portDef->format.video.nFrameHeight = mVideoHeight;
+                portDef->format.video.xFramerate = def->format.video.xFramerate;
+                portDef->format.video.eColorFormat =
+                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
+                portDef = &editPortInfo(1)->mDef;
+                portDef->format.video.nFrameWidth = mVideoWidth;
+                portDef->format.video.nFrameHeight = mVideoHeight;
             } else {
                 mVideoBitRate = def->format.video.nBitrate;
             }
@@ -607,11 +589,7 @@
                     mStoreMetaDataInBuffers ? " true" : "false");
 
             if (mStoreMetaDataInBuffers) {
-                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
-                if (mInputFrameData == NULL) {
-                    mInputFrameData =
-                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
-                }
+                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
             }
 
             return OMX_ErrorNone;
@@ -679,9 +657,8 @@
             mSawInputEOS = true;
         }
 
-        buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
         if (inHeader->nFilledLen > 0) {
-            uint8_t *inputData = NULL;
+            const uint8_t *inputData = NULL;
             if (mStoreMetaDataInBuffers) {
                 if (inHeader->nFilledLen != 8) {
                     ALOGE("MetaData buffer is wrong size! "
@@ -691,24 +668,25 @@
                     return;
                 }
                 inputData =
-                        extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
-                                &srcBuffer);
+                    extractGraphicBuffer(
+                            mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+                            inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+                            mVideoWidth, mVideoHeight);
                 if (inputData == NULL) {
                     ALOGE("Unable to extract gralloc buffer in metadata mode");
                     mSignalledError = true;
                     notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                         return;
                 }
-                // TODO: Verify/convert pixel format enum
             } else {
-                inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+                inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+                    ConvertYUV420SemiPlanarToYUV420Planar(
+                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+                    inputData = mInputFrameData;
+                }
             }
 
-            if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
-                ConvertYUV420SemiPlanarToYUV420Planar(
-                    inputData, mInputFrameData, mVideoWidth, mVideoHeight);
-                inputData = mInputFrameData;
-            }
             CHECK(inputData != NULL);
 
             VideoEncFrameIO vin, vout;
@@ -717,7 +695,7 @@
             vin.height = ((mVideoHeight  + 15) >> 4) << 4;
             vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
             vin.timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
-            vin.yChan = inputData;
+            vin.yChan = (uint8_t *)inputData;
             vin.uChan = vin.yChan + vin.height * vin.pitch;
             vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
 
@@ -744,7 +722,6 @@
 
         inQueue.erase(inQueue.begin());
         inInfo->mOwnedByUs = false;
-        releaseGrallocData(srcBuffer);
         notifyEmptyBufferDone(inHeader);
 
         outQueue.erase(outQueue.begin());
@@ -759,47 +736,6 @@
     }
 }
 
-OMX_ERRORTYPE SoftMPEG4Encoder::getExtensionIndex(
-        const char *name, OMX_INDEXTYPE *index) {
-    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
-        *(int32_t*)index = kStoreMetaDataExtensionIndex;
-        return OMX_ErrorNone;
-    }
-    return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftMPEG4Encoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
-    OMX_U32 type = *(OMX_U32*)data;
-    status_t res;
-    if (type != kMetadataBufferTypeGrallocSource) {
-        ALOGE("Data passed in with metadata mode does not have type "
-                "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
-                kMetadataBufferTypeGrallocSource, type);
-        return NULL;
-    }
-    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
-    const Rect rect(mVideoWidth, mVideoHeight);
-    uint8_t *img;
-    res = GraphicBufferMapper::get().lock(imgBuffer,
-            GRALLOC_USAGE_HW_VIDEO_ENCODER,
-            rect, (void**)&img);
-    if (res != OK) {
-        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
-                imgBuffer);
-        return NULL;
-    }
-
-    *buffer = imgBuffer;
-    return img;
-}
-
-void SoftMPEG4Encoder::releaseGrallocData(buffer_handle_t buffer) {
-    if (mStoreMetaDataInBuffers) {
-        GraphicBufferMapper::get().unlock(buffer);
-    }
-}
-
 }  // namespace android
 
 android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index c59a1b9..b0605b4 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -19,7 +19,7 @@
 
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/foundation/ABase.h>
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
 #include "mp4enc_api.h"
 
 
@@ -27,7 +27,7 @@
 
 struct MediaBuffer;
 
-struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
+struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
     SoftMPEG4Encoder(
             const char *name,
             const OMX_CALLBACKTYPE *callbacks,
@@ -43,11 +43,6 @@
 
     virtual void onQueueFilled(OMX_U32 portIndex);
 
-    // Override SoftOMXComponent methods
-
-    virtual OMX_ERRORTYPE getExtensionIndex(
-            const char *name, OMX_INDEXTYPE *index);
-
 protected:
     virtual ~SoftMPEG4Encoder();
 
@@ -86,9 +81,6 @@
     OMX_ERRORTYPE initEncoder();
     OMX_ERRORTYPE releaseEncoder();
 
-    uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
-    void releaseGrallocData(buffer_handle_t buffer);
-
     DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4Encoder);
 };
 
diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk
index 4060a0a..e265104 100644
--- a/media/libstagefright/codecs/on2/enc/Android.mk
+++ b/media/libstagefright/codecs/on2/enc/Android.mk
@@ -12,10 +12,6 @@
         frameworks/av/media/libstagefright/include \
         frameworks/native/include/media/openmax \
 
-ifeq ($(TARGET_DEVICE), manta)
-    LOCAL_CFLAGS += -DSURFACE_IS_BGR32
-endif
-
 LOCAL_STATIC_LIBRARIES := \
         libvpx
 
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index cabd6bd..eb621d5 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -50,90 +50,11 @@
     return cpuCoreCount;
 }
 
-
-// This color conversion utility is copied from SoftMPEG4Encoder.cpp
-inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv,
-                                             uint8_t* outyuv,
-                                             int32_t width,
-                                             int32_t height) {
-    int32_t outYsize = width * height;
-    uint32_t *outy =  (uint32_t *) outyuv;
-    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
-    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
-    /* Y copying */
-    memcpy(outy, inyuv, outYsize);
-
-    /* U & V copying */
-    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
-    for (int32_t i = height >> 1; i > 0; --i) {
-        for (int32_t j = width >> 2; j > 0; --j) {
-            uint32_t temp = *inyuv_4++;
-            uint32_t tempU = temp & 0xFF;
-            tempU = tempU | ((temp >> 8) & 0xFF00);
-
-            uint32_t tempV = (temp >> 8) & 0xFF;
-            tempV = tempV | ((temp >> 16) & 0xFF00);
-
-            // Flip U and V
-            *outcb++ = tempV;
-            *outcr++ = tempU;
-        }
-    }
-}
-
-static void ConvertRGB32ToPlanar(
-        const uint8_t *src, uint8_t *dstY, int32_t width, int32_t height) {
-    CHECK((width & 1) == 0);
-    CHECK((height & 1) == 0);
-
-    uint8_t *dstU = dstY + width * height;
-    uint8_t *dstV = dstU + (width / 2) * (height / 2);
-
-    for (int32_t y = 0; y < height; ++y) {
-        for (int32_t x = 0; x < width; ++x) {
-#ifdef SURFACE_IS_BGR32
-            unsigned blue = src[4 * x];
-            unsigned green = src[4 * x + 1];
-            unsigned red= src[4 * x + 2];
-#else
-            unsigned red= src[4 * x];
-            unsigned green = src[4 * x + 1];
-            unsigned blue = src[4 * x + 2];
-#endif
-
-            unsigned luma =
-                ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
-
-            dstY[x] = luma;
-
-            if ((x & 1) == 0 && (y & 1) == 0) {
-                unsigned U =
-                    ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
-
-                unsigned V =
-                    ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
-
-                dstU[x / 2] = U;
-                dstV[x / 2] = V;
-            }
-        }
-
-        if ((y & 1) == 0) {
-            dstU += width / 2;
-            dstV += width / 2;
-        }
-
-        src += 4 * width;
-        dstY += width;
-    }
-}
-
 SoftVPXEncoder::SoftVPXEncoder(const char *name,
                                const OMX_CALLBACKTYPE *callbacks,
                                OMX_PTR appData,
                                OMX_COMPONENTTYPE **component)
-    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
       mCodecContext(NULL),
       mCodecConfiguration(NULL),
       mCodecInterface(NULL),
@@ -157,7 +78,6 @@
       mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
       mConversionBuffer(NULL),
       mInputDataIsMeta(false),
-      mGrallocModule(NULL),
       mKeyFrameRequested(false) {
     memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
     mTemporalLayerBitrateRatio[0] = 100;
@@ -447,13 +367,12 @@
         }
     }
 
-    if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) {
+    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
+        free(mConversionBuffer);
+        mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
         if (mConversionBuffer == NULL) {
-            mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
-            if (mConversionBuffer == NULL) {
-                ALOGE("Allocating conversion buffer failed.");
-                return UNKNOWN_ERROR;
-            }
+            ALOGE("Allocating conversion buffer failed.");
+            return UNKNOWN_ERROR;
         }
     }
     return OK;
@@ -473,7 +392,7 @@
     }
 
     if (mConversionBuffer != NULL) {
-        delete mConversionBuffer;
+        free(mConversionBuffer);
         mConversionBuffer = NULL;
     }
 
@@ -1035,49 +954,28 @@
             return;
         }
 
-        uint8_t *source =
+        const uint8_t *source =
             inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
 
         if (mInputDataIsMeta) {
-            CHECK_GE(inputBufferHeader->nFilledLen,
-                     4 + sizeof(buffer_handle_t));
-
-            uint32_t bufferType = *(uint32_t *)source;
-            CHECK_EQ(bufferType, kMetadataBufferTypeGrallocSource);
-
-            if (mGrallocModule == NULL) {
-                CHECK_EQ(0, hw_get_module(
-                            GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+            source = extractGraphicBuffer(
+                    mConversionBuffer, mWidth * mHeight * 3 / 2,
+                    source, inputBufferHeader->nFilledLen,
+                    mWidth, mHeight);
+            if (source == NULL) {
+                ALOGE("Unable to extract gralloc buffer in metadata mode");
+                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+                return;
             }
-
-            const gralloc_module_t *grmodule =
-                (const gralloc_module_t *)mGrallocModule;
-
-            buffer_handle_t handle = *(buffer_handle_t *)(source + 4);
-
-            void *bits;
-            CHECK_EQ(0,
-                     grmodule->lock(
-                         grmodule, handle,
-                         GRALLOC_USAGE_SW_READ_OFTEN
-                            | GRALLOC_USAGE_SW_WRITE_NEVER,
-                         0, 0, mWidth, mHeight, &bits));
-
-            ConvertRGB32ToPlanar(
-                    (const uint8_t *)bits, mConversionBuffer, mWidth, mHeight);
-
-            source = mConversionBuffer;
-
-            CHECK_EQ(0, grmodule->unlock(grmodule, handle));
         } else if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
-            ConvertSemiPlanarToPlanar(
+            ConvertYUV420SemiPlanarToYUV420Planar(
                     source, mConversionBuffer, mWidth, mHeight);
 
             source = mConversionBuffer;
         }
         vpx_image_t raw_frame;
         vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
-                     kInputBufferAlignment, source);
+                     kInputBufferAlignment, (uint8_t *)source);
 
         vpx_enc_frame_flags_t flags = 0;
         if (mTemporalPatternLength > 0) {
@@ -1153,15 +1051,6 @@
     }
 }
 
-OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex(
-        const char *name, OMX_INDEXTYPE *index) {
-    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
-        *(int32_t*)index = kStoreMetaDataExtensionIndex;
-        return OMX_ErrorNone;
-    }
-    return SimpleSoftOMXComponent::getExtensionIndex(name, index);
-}
-
 }  // namespace android
 
 
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 5b4c954..f4c1564 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -18,7 +18,7 @@
 
 #define SOFT_VPX_ENCODER_H_
 
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
 
 #include <OMX_VideoExt.h>
 #include <OMX_IndexExt.h>
@@ -59,7 +59,7 @@
 //    - OMX timestamps are in microseconds, therefore
 // encoder timebase is fixed to 1/1000000
 
-struct SoftVPXEncoder : public SimpleSoftOMXComponent {
+struct SoftVPXEncoder : public SoftVideoEncoderOMXComponent {
     SoftVPXEncoder(const char *name,
                    const OMX_CALLBACKTYPE *callbacks,
                    OMX_PTR appData,
@@ -87,9 +87,6 @@
     // encoding of the frame
     virtual void onQueueFilled(OMX_U32 portIndex);
 
-    virtual OMX_ERRORTYPE getExtensionIndex(
-            const char *name, OMX_INDEXTYPE *index);
-
 private:
     enum TemporalReferences {
         // For 1 layer case: reference all (last, golden, and alt ref), but only
@@ -233,7 +230,6 @@
     uint8_t* mConversionBuffer;
 
     bool mInputDataIsMeta;
-    const hw_module_t *mGrallocModule;
 
     bool mKeyFrameRequested;
 
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index dc6d410..24d431c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -182,7 +182,9 @@
             OMX_IN OMX_PTR pAppData,
             OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
 
-    status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable);
+    status_t storeMetaDataInBuffers_l(
+            OMX_U32 portIndex, OMX_BOOL enable,
+            OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta);
 
     sp<GraphicBufferSource> getGraphicBufferSource();
     void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
new file mode 100644
index 0000000..b3b810d
--- /dev/null
+++ b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#define SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#include "SimpleSoftOMXComponent.h"
+#include <system/window.h>
+
+struct hw_module_t;
+
+namespace android {
+
+struct SoftVideoEncoderOMXComponent : public SimpleSoftOMXComponent {
+    SoftVideoEncoderOMXComponent(
+            const char *name,
+            const OMX_CALLBACKTYPE *callbacks,
+            OMX_PTR appData,
+            OMX_COMPONENTTYPE **component);
+
+protected:
+    static void ConvertFlexYUVToPlanar(
+            uint8_t *dst, size_t dstStride, size_t dstVStride,
+            struct android_ycbcr *ycbcr, int32_t width, int32_t height);
+
+    static void ConvertYUV420SemiPlanarToYUV420Planar(
+            const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height);
+
+    static void ConvertRGB32ToPlanar(
+        uint8_t *dstY, size_t dstStride, size_t dstVStride,
+        const uint8_t *src, size_t width, size_t height, size_t srcStride,
+        bool bgr);
+
+    const uint8_t *extractGraphicBuffer(
+            uint8_t *dst, size_t dstSize, const uint8_t *src, size_t srcSize,
+            size_t width, size_t height) const;
+
+    virtual OMX_ERRORTYPE getExtensionIndex(const char *name, OMX_INDEXTYPE *index);
+
+    enum {
+        kInputPortIndex = 0,
+        kOutputPortIndex = 1,
+    };
+
+private:
+    mutable const hw_module_t *mGrallocModule;
+
+    DISALLOW_EVIL_CONSTRUCTORS(SoftVideoEncoderOMXComponent);
+};
+
+}  // namespace android
+
+#endif  // SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index cd912e7..aaa8334 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -1,6 +1,10 @@
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
 
+ifeq ($(TARGET_DEVICE), manta)
+    LOCAL_CFLAGS += -DSURFACE_IS_BGR32
+endif
+
 LOCAL_SRC_FILES:=                     \
         GraphicBufferSource.cpp       \
         OMX.cpp                       \
@@ -10,6 +14,7 @@
         SoftOMXComponent.cpp          \
         SoftOMXPlugin.cpp             \
         SoftVideoDecoderOMXComponent.cpp \
+        SoftVideoEncoderOMXComponent.cpp \
 
 LOCAL_C_INCLUDES += \
         $(TOP)/frameworks/av/media/libstagefright \
@@ -18,6 +23,7 @@
 
 LOCAL_SHARED_LIBRARIES :=               \
         libbinder                       \
+        libhardware                     \
         libmedia                        \
         libutils                        \
         liblog                          \
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index fad6c33..3e70956 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -37,7 +37,8 @@
 
 
 GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
-        uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount) :
+        uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount,
+        bool useGraphicBufferInMeta) :
     mInitCheck(UNKNOWN_ERROR),
     mNodeInstance(nodeInstance),
     mExecuting(false),
@@ -59,7 +60,8 @@
     mTimePerCaptureUs(-1ll),
     mTimePerFrameUs(-1ll),
     mPrevCaptureUs(-1ll),
-    mPrevFrameUs(-1ll) {
+    mPrevFrameUs(-1ll),
+    mUseGraphicBufferInMeta(useGraphicBufferInMeta) {
 
     ALOGV("GraphicBufferSource w=%u h=%u c=%u",
             bufferWidth, bufferHeight, bufferCount);
@@ -254,13 +256,25 @@
         // Pull the graphic buffer handle back out of the buffer, and confirm
         // that it matches expectations.
         OMX_U8* data = header->pBuffer;
-        buffer_handle_t bufferHandle;
-        memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
-        if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
-            // should never happen
-            ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
-                    bufferHandle, codecBuffer.mGraphicBuffer->handle);
-            CHECK(!"codecBufferEmptied: mismatched buffer");
+        MetadataBufferType type = *(MetadataBufferType *)data;
+        if (type == kMetadataBufferTypeGrallocSource) {
+            buffer_handle_t bufferHandle;
+            memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
+            if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
+                // should never happen
+                ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
+                        bufferHandle, codecBuffer.mGraphicBuffer->handle);
+                CHECK(!"codecBufferEmptied: mismatched buffer");
+            }
+        } else if (type == kMetadataBufferTypeGraphicBuffer) {
+            GraphicBuffer *buffer;
+            memcpy(&buffer, data + 4, sizeof(buffer));
+            if (buffer != codecBuffer.mGraphicBuffer.get()) {
+                // should never happen
+                ALOGE("codecBufferEmptied: buffer is %p, expected %p",
+                        buffer, codecBuffer.mGraphicBuffer.get());
+                CHECK(!"codecBufferEmptied: mismatched buffer");
+            }
         }
     }
 
@@ -642,10 +656,22 @@
     OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
     CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t));
     OMX_U8* data = header->pBuffer;
-    const OMX_U32 type = kMetadataBufferTypeGrallocSource;
-    buffer_handle_t handle = codecBuffer.mGraphicBuffer->handle;
-    memcpy(data, &type, 4);
-    memcpy(data + 4, &handle, sizeof(buffer_handle_t));
+    buffer_handle_t handle;
+    if (!mUseGraphicBufferInMeta) {
+        const OMX_U32 type = kMetadataBufferTypeGrallocSource;
+        handle = codecBuffer.mGraphicBuffer->handle;
+        memcpy(data, &type, 4);
+        memcpy(data + 4, &handle, sizeof(buffer_handle_t));
+    } else {
+        // codecBuffer holds a reference to the GraphicBuffer, so
+        // it is valid while it is with the OMX component
+        const OMX_U32 type = kMetadataBufferTypeGraphicBuffer;
+        memcpy(data, &type, 4);
+        // passing a non-reference-counted graphicBuffer
+        GraphicBuffer *buffer = codecBuffer.mGraphicBuffer.get();
+        handle = buffer->handle;
+        memcpy(data + 4, &buffer, sizeof(buffer));
+    }
 
     status_t err = mNodeInstance->emptyDirectBuffer(header, 0,
             4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME,
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index a70cc1a..c0860ab 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -49,7 +49,8 @@
 class GraphicBufferSource : public BufferQueue::ConsumerListener {
 public:
     GraphicBufferSource(OMXNodeInstance* nodeInstance,
-            uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount);
+            uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount,
+            bool useGraphicBufferInMeta = false);
     virtual ~GraphicBufferSource();
 
     // We can't throw an exception if the constructor fails, so we just set
@@ -271,6 +272,8 @@
     int64_t mPrevCaptureUs;
     int64_t mPrevFrameUs;
 
+    bool mUseGraphicBufferInMeta;
+
     void onMessageReceived(const sp<AMessage> &msg);
 
     DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index efb27f5..d07ec14 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -393,20 +393,39 @@
         OMX_U32 portIndex,
         OMX_BOOL enable) {
     Mutex::Autolock autolock(mLock);
-    return storeMetaDataInBuffers_l(portIndex, enable);
+    return storeMetaDataInBuffers_l(
+            portIndex, enable,
+            OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */);
 }
 
 status_t OMXNodeInstance::storeMetaDataInBuffers_l(
         OMX_U32 portIndex,
-        OMX_BOOL enable) {
+        OMX_BOOL enable,
+        OMX_BOOL useGraphicBuffer,
+        OMX_BOOL *usingGraphicBufferInMetadata) {
     OMX_INDEXTYPE index;
     OMX_STRING name = const_cast<OMX_STRING>(
             "OMX.google.android.index.storeMetaDataInBuffers");
 
-    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+    OMX_STRING graphicBufferName = const_cast<OMX_STRING>(
+            "OMX.google.android.index.storeGraphicBufferInMetaData");
+    if (usingGraphicBufferInMetadata == NULL) {
+        usingGraphicBufferInMetadata = &useGraphicBuffer;
+    }
+
+    OMX_ERRORTYPE err =
+        (useGraphicBuffer && portIndex == kPortIndexInput)
+                ? OMX_GetExtensionIndex(mHandle, graphicBufferName, &index)
+                : OMX_ErrorBadParameter;
+    if (err == OMX_ErrorNone) {
+        *usingGraphicBufferInMetadata = OMX_TRUE;
+    } else {
+        *usingGraphicBufferInMetadata = OMX_FALSE;
+        err = OMX_GetExtensionIndex(mHandle, name, &index);
+    }
+
     if (err != OMX_ErrorNone) {
         ALOGE("OMX_GetExtensionIndex %s failed", name);
-
         return StatusFromOMXError(err);
     }
 
@@ -421,6 +440,7 @@
     params.bStoreMetaData = enable;
     if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
         ALOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+        *usingGraphicBufferInMetadata = OMX_FALSE;
         return UNKNOWN_ERROR;
     }
     return err;
@@ -683,7 +703,10 @@
     }
 
     // Input buffers will hold meta-data (gralloc references).
-    err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE);
+    OMX_BOOL usingGraphicBuffer = OMX_FALSE;
+    err = storeMetaDataInBuffers_l(
+            portIndex, OMX_TRUE,
+            OMX_TRUE /* useGraphicBuffer */, &usingGraphicBuffer);
     if (err != OK) {
         return err;
     }
@@ -709,7 +732,7 @@
 
     GraphicBufferSource* bufferSource = new GraphicBufferSource(
             this, def.format.video.nFrameWidth, def.format.video.nFrameHeight,
-            def.nBufferCountActual);
+            def.nBufferCountActual, usingGraphicBuffer);
     if ((err = bufferSource->initCheck()) != OK) {
         delete bufferSource;
         return err;
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
new file mode 100644
index 0000000..8bff142
--- /dev/null
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftVideoEncoderOMXComponent"
+#include <utils/Log.h>
+
+#include "include/SoftVideoEncoderOMXComponent.h"
+
+#include <hardware/gralloc.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+namespace android {
+
+// mGrallocModule starts out NULL and is loaded on first use in
+// extractGraphicBuffer().
+SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
+        const char *name,
+        const OMX_CALLBACKTYPE *callbacks,
+        OMX_PTR appData,
+        OMX_COMPONENTTYPE **component)
+    : SimpleSoftOMXComponent(name, callbacks, appData, component),
+      mGrallocModule(NULL) {
+}
+
+// static
+// Copies a 4:2:0 image described by |ycbcr| (flexible layout: arbitrary
+// byte strides and chroma step) into |dst| as contiguous planar YUV:
+// a dstStride x dstVStride luma plane followed by two half-resolution
+// chroma planes of stride dstStride/2.
+void SoftVideoEncoderOMXComponent::ConvertFlexYUVToPlanar(
+        uint8_t *dst, size_t dstStride, size_t dstVStride,
+        struct android_ycbcr *ycbcr, int32_t width, int32_t height) {
+    const uint8_t *src = (const uint8_t *)ycbcr->y;
+    const uint8_t *srcU = (const uint8_t *)ycbcr->cb;
+    const uint8_t *srcV = (const uint8_t *)ycbcr->cr;
+    // Destination chroma planes start immediately after the Y plane.
+    uint8_t *dstU = dst + dstVStride * dstStride;
+    uint8_t *dstV = dstU + (dstVStride >> 1) * (dstStride >> 1);
+
+    // Luma: straight row-by-row copy, advancing by each side's stride.
+    for (size_t y = height; y > 0; --y) {
+        memcpy(dst, src, width);
+        dst += dstStride;
+        src += ycbcr->ystride;
+    }
+    // Fast path: source chroma is already planar (one byte per sample at
+    // half the luma stride), so whole rows can be memcpy'd.
+    if (ycbcr->cstride == ycbcr->ystride >> 1 && ycbcr->chroma_step == 1) {
+        // planar
+        for (size_t y = height >> 1; y > 0; --y) {
+            memcpy(dstU, srcU, width >> 1);
+            dstU += dstStride >> 1;
+            srcU += ycbcr->cstride;
+            memcpy(dstV, srcV, width >> 1);
+            dstV += dstStride >> 1;
+            srcV += ycbcr->cstride;
+        }
+    } else {
+        // arbitrary
+        // Generic path: copy sample by sample honoring chroma_step, then
+        // re-align both source and destination to the next row.
+        for (size_t y = height >> 1; y > 0; --y) {
+            for (size_t x = width >> 1; x > 0; --x) {
+                *dstU++ = *srcU;
+                *dstV++ = *srcV;
+                srcU += ycbcr->chroma_step;
+                srcV += ycbcr->chroma_step;
+            }
+            // Undo the partial-row advance and step to the next row.
+            dstU += (dstStride >> 1) - (width >> 1);
+            dstV += (dstStride >> 1) - (width >> 1);
+            srcU += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+            srcV += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+        }
+    }
+}
+
+// static
+// Converts a tightly-packed 4:2:0 image with interleaved chroma into
+// planar YUV (Y plane, then two quarter-size chroma planes).  Both input
+// and output rows are assumed packed (stride == width).
+void SoftVideoEncoderOMXComponent::ConvertYUV420SemiPlanarToYUV420Planar(
+        const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height) {
+    // TODO: add support for stride
+    int32_t outYsize = width * height;
+    uint32_t *outY  = (uint32_t *) outYUV;
+    uint16_t *outCb = (uint16_t *) (outYUV + outYsize);
+    uint16_t *outCr = (uint16_t *) (outYUV + outYsize + (outYsize >> 2));
+
+    /* Y copying */
+    memcpy(outY, inYVU, outYsize);
+
+    /* U & V copying */
+    // FIXME this only works if width is multiple of 4
+    // De-interleaves two chroma pairs per 32-bit read.  On little-endian,
+    // bytes 0 and 2 of each word land in the first output plane (outCb)
+    // and bytes 1 and 3 in the second (outCr).
+    // NOTE(review): the parameter name |inYVU| suggests V-first (NV21)
+    // chroma order, which would make byte 0 a Cr sample — confirm the
+    // intended chroma ordering (and the little-endian assumption) against
+    // the callers.
+    uint32_t *inYVU_4 = (uint32_t *) (inYVU + outYsize);
+    for (int32_t i = height >> 1; i > 0; --i) {
+        for (int32_t j = width >> 2; j > 0; --j) {
+            uint32_t temp = *inYVU_4++;
+            // Bytes 0 and 2 of the word, packed into 16 bits.
+            uint32_t tempU = temp & 0xFF;
+            tempU = tempU | ((temp >> 8) & 0xFF00);
+
+            // Bytes 1 and 3 of the word, packed into 16 bits.
+            uint32_t tempV = (temp >> 8) & 0xFF;
+            tempV = tempV | ((temp >> 16) & 0xFF00);
+
+            *outCb++ = tempU;
+            *outCr++ = tempV;
+        }
+    }
+}
+
+// static
+// Converts a 32-bit-per-pixel RGB image (RGBA byte order, or BGRA when
+// |bgr| is true) into planar YUV using the ITU-R BT.601 matrix in 8-bit
+// fixed point.  Chroma is taken from the pixel at each even (x, y)
+// position (point sampling, no averaging).  Width and height must be
+// even.
+void SoftVideoEncoderOMXComponent::ConvertRGB32ToPlanar(
+        uint8_t *dstY, size_t dstStride, size_t dstVStride,
+        const uint8_t *src, size_t width, size_t height, size_t srcStride,
+        bool bgr) {
+    CHECK((width & 1) == 0);
+    CHECK((height & 1) == 0);
+
+    // Chroma planes follow the Y plane in the destination.
+    uint8_t *dstU = dstY + dstStride * dstVStride;
+    uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);
+
+#ifdef SURFACE_IS_BGR32
+    // Devices built with this flag (see Android.mk: manta) produce
+    // surfaces with red and blue swapped, so invert the caller's flag.
+    bgr = !bgr;
+#endif
+
+    const size_t redOffset   = bgr ? 2 : 0;
+    const size_t greenOffset = 1;
+    const size_t blueOffset  = bgr ? 0 : 2;
+
+    for (size_t y = 0; y < height; ++y) {
+        for (size_t x = 0; x < width; ++x) {
+            unsigned red   = src[redOffset];
+            unsigned green = src[greenOffset];
+            unsigned blue  = src[blueOffset];
+
+            // using ITU-R BT.601 conversion matrix
+            unsigned luma =
+                ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+
+            dstY[x] = luma;
+
+            // One chroma sample per 2x2 block, from its top-left pixel.
+            if ((x & 1) == 0 && (y & 1) == 0) {
+                unsigned U =
+                    ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+
+                unsigned V =
+                    ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+
+                dstU[x >> 1] = U;
+                dstV[x >> 1] = V;
+            }
+            src += 4;
+        }
+
+        // Chroma rows advance only every other luma row.
+        if ((y & 1) == 0) {
+            dstU += dstStride >> 1;
+            dstV += dstStride >> 1;
+        }
+
+        // Skip the source row padding beyond width * 4 bytes.
+        src += srcStride - 4 * width;
+        dstY += dstStride;
+    }
+}
+
+// Reads the image referenced by the metadata buffer |src| — either a
+// kMetadataBufferTypeGrallocSource handle or a
+// kMetadataBufferTypeGraphicBuffer pointer — and converts it into planar
+// YUV in |dst| (stride = width, vertical stride = height).
+// Returns |dst| on success; NULL on bad metadata, a too-small source or
+// destination buffer, a gralloc lock failure, or an unsupported pixel
+// format.
+const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(
+        uint8_t *dst, size_t dstSize,
+        const uint8_t *src, size_t srcSize,
+        size_t width, size_t height) const {
+    size_t dstStride = width;
+    size_t dstVStride = height;
+
+    MetadataBufferType bufferType = *(MetadataBufferType *)src;
+    bool usingGraphicBuffer = bufferType == kMetadataBufferTypeGraphicBuffer;
+    if (!usingGraphicBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
+        ALOGE("Unsupported metadata type (%d)", bufferType);
+        return NULL;
+    }
+
+    // Load the gralloc module lazily on first use.
+    if (mGrallocModule == NULL) {
+        CHECK_EQ(0, hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+    }
+
+    const gralloc_module_t *grmodule =
+        (const gralloc_module_t *)mGrallocModule;
+
+    buffer_handle_t handle;
+    int format;
+    size_t srcStride;     // luma row stride in bytes
+    size_t srcVStride;    // number of rows in the luma plane
+    if (usingGraphicBuffer) {
+        if (srcSize < 4 + sizeof(GraphicBuffer *)) {
+            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, 4 + sizeof(GraphicBuffer *));
+            return NULL;
+        }
+
+        GraphicBuffer *buffer = *(GraphicBuffer **)(src + 4);
+        handle = buffer->handle;
+        format = buffer->format;
+        srcStride = buffer->stride;
+        // NOTE(review): GraphicBuffer has no vertical-stride field; using
+        // height assumes the planes are packed vertically — confirm.
+        srcVStride = buffer->height;
+        // Convert stride from pixels to bytes for the 32-bit RGB formats.
+        // The 4:2:0 YUV formats handled below (including NV21) use one
+        // byte per luma pixel, so their pixel stride already equals the
+        // byte stride.
+        if (format != HAL_PIXEL_FORMAT_YV12 &&
+            format != HAL_PIXEL_FORMAT_YCrCb_420_SP &&
+            format != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+            // TODO do we need to support other formats?
+            srcStride *= 4;
+        }
+    } else {
+        // TODO: remove this part.  Check if anyone uses this.
+
+        if (srcSize < 4 + sizeof(buffer_handle_t)) {
+            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, 4 + sizeof(buffer_handle_t));
+            return NULL;
+        }
+
+        handle = *(buffer_handle_t *)(src + 4);
+        // assume HAL_PIXEL_FORMAT_RGBA_8888
+        // there is no way to get the src stride without the graphic buffer
+        format = HAL_PIXEL_FORMAT_RGBA_8888;
+        srcStride = width * 4;
+        srcVStride = height;
+    }
+
+    // Y plane plus the two half-resolution chroma planes; the final
+    // chroma row only needs width/2 bytes, hence the trailing terms.
+    size_t neededSize =
+        dstStride * dstVStride + (width >> 1)
+                + (dstStride >> 1) * ((dstVStride >> 1) + (height >> 1) - 1);
+    if (dstSize < neededSize) {
+        ALOGE("destination buffer is too small (%zu vs %zu)", dstSize, neededSize);
+        return NULL;
+    }
+
+    // Lock the buffer for CPU read access; flexible-YUV buffers must be
+    // locked via lock_ycbcr to obtain the per-plane layout.
+    void *bits = NULL;
+    struct android_ycbcr ycbcr;
+    status_t res;
+    if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+        res = grmodule->lock_ycbcr(
+                 grmodule, handle,
+                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+                 0, 0, width, height, &ycbcr);
+    } else {
+        res = grmodule->lock(
+                 grmodule, handle,
+                 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+                 0, 0, width, height, &bits);
+    }
+    if (res != OK) {
+        ALOGE("Unable to lock image buffer %p for access", handle);
+        return NULL;
+    }
+
+    switch (format) {
+        case HAL_PIXEL_FORMAT_YV12:  // YCrCb / YVU planar
+            // convert to flex YUV
+            ycbcr.y = bits;
+            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+            ycbcr.cb = (uint8_t *)ycbcr.cr + (srcStride >> 1) * (srcVStride >> 1);
+            ycbcr.chroma_step = 1;
+            // Strides are in bytes: half-width chroma rows follow the
+            // srcStride-wide luma rows.  (Was srcVStride — the row count —
+            // which garbled any buffer whose stride != height.)
+            ycbcr.cstride = srcStride >> 1;
+            ycbcr.ystride = srcStride;
+            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+            break;
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:  // YCrCb / YVU semiplanar, NV21
+            // convert to flex YUV
+            ycbcr.y = bits;
+            ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+            ycbcr.cb = (uint8_t *)ycbcr.cr + 1;
+            ycbcr.chroma_step = 2;
+            // Interleaved VU rows span the full luma byte stride.
+            ycbcr.cstride = srcStride;
+            ycbcr.ystride = srcStride;
+            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+            break;
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            // lock_ycbcr already filled in the plane layout.
+            ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+            break;
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_BGRA_8888:
+            ConvertRGB32ToPlanar(
+                    dst, dstStride, dstVStride,
+                    (const uint8_t *)bits, width, height, srcStride,
+                    format == HAL_PIXEL_FORMAT_BGRA_8888);
+            break;
+        default:
+            ALOGE("Unsupported pixel format %#x", format);
+            dst = NULL;
+            break;
+    }
+
+    if (grmodule->unlock(grmodule, handle) != OK) {
+        ALOGE("Unable to unlock image buffer %p for access", handle);
+    }
+
+    return dst;
+}
+
+// Resolves vendor extension strings.  Both metadata-mode extensions map
+// onto the same parameter index; everything else is delegated to the
+// base class.
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
+        const char *name, OMX_INDEXTYPE *index) {
+    const bool isStoreMetaDataExtension =
+            !strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
+            !strcmp(name, "OMX.google.android.index.storeGraphicBufferInMetaData");
+    if (!isStoreMetaDataExtension) {
+        return SimpleSoftOMXComponent::getExtensionIndex(name, index);
+    }
+    *(int32_t*)index = kStoreMetaDataExtensionIndex;
+    return OMX_ErrorNone;
+}
+
+}  // namespace android