am aabc3e13: (-s ours) DO NOT MERGE: Hack this copy of our internal sources to match changes to support A/V sync.

* commit 'aabc3e13ddf230dd0c9ad5edc0c3eb2687d671cb':
  DO NOT MERGE: Hack this copy of our internal sources to match changes to support A/V sync.
diff --git a/libvideoeditor/lvpp/DummyAudioSource.cpp b/libvideoeditor/lvpp/DummyAudioSource.cpp
index 04f8dc9..70f5944 100755
--- a/libvideoeditor/lvpp/DummyAudioSource.cpp
+++ b/libvideoeditor/lvpp/DummyAudioSource.cpp
@@ -97,7 +97,12 @@
     LOG2("DummyAudioSource::~DummyAudioSource");
 }
 
-
+void DummyAudioSource::setDuration (int64_t audioDurationUs) {
+    Mutex::Autolock autoLock(mLock);
+    LOG2("SetDuration %lld", mAudioDurationUs);
+    mAudioDurationUs += audioDurationUs;
+    LOG2("SetDuration %lld", mAudioDurationUs);
+}
 
 status_t DummyAudioSource::start(MetaData *params) {
     status_t err = OK;
@@ -143,7 +148,7 @@
     meta->setInt32(kKeySampleRate, mSamplingRate);
     meta->setInt64(kKeyDuration, mFrameDurationUs);
 
-     meta->setCString(kKeyDecoderComponent, "DummyAudioSource");
+    meta->setCString(kKeyDecoderComponent, "DummyAudioSource");
 
     return meta;
 }
@@ -159,11 +164,14 @@
     if (options && options->getSeekTo(&seekTimeUs, &mode)) {
         CHECK(seekTimeUs >= 0);
         mTimeStampUs = seekTimeUs;
-     }
-
-    if (mTimeStampUs >= mAudioDurationUs) {
-        *out = NULL;
-        return ERROR_END_OF_STREAM;
+    }
+    {
+        Mutex::Autolock autoLock(mLock);
+        if (mTimeStampUs >= mAudioDurationUs) {
+            *out = NULL;
+            LOGI("EOS reached");
+            return ERROR_END_OF_STREAM;
+        }
     }
 
     err = mBufferGroup->acquire_buffer(&buffer);
diff --git a/libvideoeditor/lvpp/DummyAudioSource.h b/libvideoeditor/lvpp/DummyAudioSource.h
index a562bc1..6e6ead4 100755
--- a/libvideoeditor/lvpp/DummyAudioSource.h
+++ b/libvideoeditor/lvpp/DummyAudioSource.h
@@ -45,8 +45,9 @@
     virtual status_t start(MetaData *params = NULL);

     virtual status_t stop();

     virtual sp<MetaData> getFormat();

-    virtual status_t read (MediaBuffer **buffer, 

+    virtual status_t read (MediaBuffer **buffer,

                             const MediaSource::ReadOptions *options = NULL);

+    void setDuration (int64_t audioDurationUs);

 

 protected:

     DummyAudioSource (int32_t samplingRate,

@@ -63,6 +64,8 @@
     int64_t mAudioDurationUs;

     int64_t mTimeStampUs;

     int32_t mNbBuffer;

+    Mutex mLock;

+

     MediaBufferGroup *mBufferGroup;

 

     DummyAudioSource(const DummyAudioSource &);

diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
index 360206e..8a3576c 100644
--- a/libvideoeditor/lvpp/PreviewPlayer.cpp
+++ b/libvideoeditor/lvpp/PreviewPlayer.cpp
@@ -27,7 +27,6 @@
 #include "DummyAudioSource.h"
 #include "DummyVideoSource.h"
 #include "VideoEditorSRC.h"
-#include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 #include "include/ThrottledSource.h"
 
@@ -182,6 +181,7 @@
     mLastVideoBuffer = NULL;
     mSuspensionState = NULL;
     mEffectsSettings = NULL;
+    mVeAudioPlayer = NULL;
     mAudioMixStoryBoardTS = 0;
     mCurrentMediaBeginCutTime = 0;
     mCurrentMediaVolumeValue = 0;
@@ -192,11 +192,12 @@
     mProgressCbInterval = 0;
     mNumberDecVideoFrames = 0;
     mOverlayUpdateEventPosted = false;
+    mIsChangeSourceRequired = true;
 
     mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
     mVideoEventPending = false;
     mStreamDoneEvent = new PreviewPlayerEvent(this,
-         &AwesomePlayer::onStreamDone);
+         &PreviewPlayer::onStreamDone);
 
     mStreamDoneEventPending = false;
 
@@ -396,7 +397,10 @@
 
     mTimeSource = NULL;
 
-    delete mAudioPlayer;
+    // A single audio player instance is shared,
+    // so do not delete it here.
+    // It is deleted from the PreviewController class.
+    //delete mAudioPlayer;
     mAudioPlayer = NULL;
 
     if (mLastVideoBuffer) {
@@ -451,18 +455,6 @@
     }
 }
 
-void PreviewPlayer::partial_reset_l() {
-
-    if (mLastVideoBuffer) {
-        mLastVideoBuffer->release();
-        mLastVideoBuffer = NULL;
-    }
-
-    /* call base struct */
-    AwesomePlayer::partial_reset_l();
-
-}
-
 status_t PreviewPlayer::play() {
     Mutex::Autolock autoLock(mLock);
 
@@ -471,8 +463,127 @@
     return play_l();
 }
 
+status_t PreviewPlayer::startAudioPlayer_l() {
+    CHECK(!(mFlags & AUDIO_RUNNING));
+
+    if (mAudioSource == NULL || mAudioPlayer == NULL) {
+        return OK;
+    }
+
+    if (!(mFlags & AUDIOPLAYER_STARTED)) {
+        mFlags |= AUDIOPLAYER_STARTED;
+
+        // We've already started the MediaSource in order to enable
+        // the prefetcher to read its data.
+        status_t err = mVeAudioPlayer->start(
+                true /* sourceAlreadyStarted */);
+
+        if (err != OK) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+            return err;
+        }
+    } else {
+        mVeAudioPlayer->resume();
+    }
+
+    mFlags |= AUDIO_RUNNING;
+
+    mWatchForAudioEOS = true;
+
+    return OK;
+}
+
+status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
+    Mutex::Autolock autoLock(mLock);
+    CHECK(!(mFlags & PLAYING));
+    mAudioPlayer = audioPlayer;
+
+    LOGV("SetAudioPlayer");
+    mIsChangeSourceRequired = true;
+    mVeAudioPlayer =
+            (VideoEditorAudioPlayer*)mAudioPlayer;
+
+    // check if the new and old source are dummy
+    sp<MediaSource> anAudioSource = mVeAudioPlayer->getSource();
+    if (anAudioSource == NULL) {
+        // Audio player does not have any source set.
+        LOGV("setAudioPlayer: Audio player does not have any source set");
+        return OK;
+    }
+
+    const char *pSrcType1;
+    const char *pSrcType2;
+    sp<MetaData> meta = anAudioSource->getFormat();
+
+    if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
+        if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
+            meta = mAudioSource->getFormat();
+            if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
+                if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
+                    mIsChangeSourceRequired = false;
+                    // Just set the new play duration for the existing source
+                    MediaSource *pMediaSrc = anAudioSource.get();
+                    DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
+                    //Increment the duration of audio source
+                    pDummyAudioSource->setDuration((int64_t)((mPlayEndTimeMsec)*1000));
+                }
+            }
+        }
+    }
+
+    return OK;
+}
+
+void PreviewPlayer::onStreamDone() {
+    // Posted whenever any stream finishes playing.
+
+    Mutex::Autolock autoLock(mLock);
+    if (!mStreamDoneEventPending) {
+        return;
+    }
+    mStreamDoneEventPending = false;
+
+    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
+        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
+
+        notifyListener_l(
+                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
+
+        pause_l(true /* at eos */);
+
+        mFlags |= AT_EOS;
+        return;
+    }
+
+    const bool allDone =
+        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
+            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
+
+    if (!allDone) {
+        return;
+    }
+
+    if (mFlags & (LOOPING | AUTO_LOOPING)) {
+        seekTo_l(0);
+
+        if (mVideoSource != NULL) {
+            postVideoEvent_l();
+        }
+    } else {
+        LOGV("MEDIA_PLAYBACK_COMPLETE");
+        //pause before sending event
+        pause_l(true /* at eos */);
+        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
+
+        mFlags |= AT_EOS;
+    }
+}
+
+
 status_t PreviewPlayer::play_l() {
-VideoEditorAudioPlayer  *mVePlayer;
+
+    mFlags &= ~SEEK_PREVIEW;
+
     if (mFlags & PLAYING) {
         return OK;
     }
@@ -496,44 +607,91 @@
             if (mAudioSink != NULL) {
 
                 mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
-                mVePlayer =
+                mVeAudioPlayer =
                           (VideoEditorAudioPlayer*)mAudioPlayer;
 
                 mAudioPlayer->setSource(mAudioSource);
 
-                mVePlayer->setAudioMixSettings(
+                mVeAudioPlayer->setAudioMixSettings(
                  mPreviewPlayerAudioMixSettings);
 
-                mVePlayer->setAudioMixPCMFileHandle(
+                mVeAudioPlayer->setAudioMixPCMFileHandle(
                  mAudioMixPCMFileHandle);
 
-                mVePlayer->setAudioMixStoryBoardSkimTimeStamp(
+                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
                  mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
                  mCurrentMediaVolumeValue);
 
+                 mFlags |= AUDIOPLAYER_STARTED;
                 // We've already started the MediaSource in order to enable
                 // the prefetcher to read its data.
-                status_t err = mVePlayer->start(
+                status_t err = mVeAudioPlayer->start(
                         true /* sourceAlreadyStarted */);
 
                 if (err != OK) {
-                    delete mAudioPlayer;
+                    //delete mAudioPlayer;
                     mAudioPlayer = NULL;
 
                     mFlags &= ~(PLAYING | FIRST_FRAME);
                     return err;
                 }
 
-                mTimeSource = mVePlayer; //mAudioPlayer;
-
+                mTimeSource = mVeAudioPlayer;
+                mFlags |= AUDIO_RUNNING;
                 deferredAudioSeek = true;
                 mWatchForAudioSeekComplete = false;
                 mWatchForAudioEOS = true;
             }
         } else {
-            mVePlayer->resume();
-        }
+            mVeAudioPlayer = (VideoEditorAudioPlayer*)mAudioPlayer;
+            bool isAudioPlayerStarted = mVeAudioPlayer->isStarted();
 
+            if (mIsChangeSourceRequired == true) {
+                LOGV("play_l: Change audio source required");
+
+                if (isAudioPlayerStarted == true) {
+                    mVeAudioPlayer->pause();
+                }
+
+                mVeAudioPlayer->setSource(mAudioSource);
+                mVeAudioPlayer->setObserver(this);
+
+                mVeAudioPlayer->setAudioMixSettings(
+                 mPreviewPlayerAudioMixSettings);
+
+                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
+                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
+                    mCurrentMediaVolumeValue);
+
+                if (isAudioPlayerStarted == true) {
+                    mVeAudioPlayer->resume();
+                } else {
+                    status_t err = OK;
+                    err = mVeAudioPlayer->start(true);
+                    if (err != OK) {
+                        mAudioPlayer = NULL;
+                        mVeAudioPlayer = NULL;
+
+                        mFlags &= ~(PLAYING | FIRST_FRAME);
+                        return err;
+                    }
+                }
+            } else {
+                LOGV("play_l: No Source change required");
+                mVeAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
+                    mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
+                    mCurrentMediaVolumeValue);
+
+                mVeAudioPlayer->resume();
+            }
+
+            mFlags |= AUDIOPLAYER_STARTED;
+            mFlags |= AUDIO_RUNNING;
+            mTimeSource = mVeAudioPlayer;
+            deferredAudioSeek = true;
+            mWatchForAudioSeekComplete = false;
+            mWatchForAudioEOS = true;
+        }
     }
 
     if (mTimeSource == NULL && mAudioPlayer == NULL) {
@@ -547,6 +705,7 @@
         MediaBuffer *aLocalBuffer;
         options.setSeekTo(mSeekTimeUs);
         mVideoSource->read(&aLocalBuffer, &options);
+        aLocalBuffer->release();
     }
 
     if (mVideoSource != NULL) {
@@ -736,6 +895,11 @@
     }
     mVideoEventPending = false;
 
+    if (mFlags & SEEK_PREVIEW) {
+        mFlags &= ~SEEK_PREVIEW;
+        return;
+    }
+
     TimeSource *ts_st =  &mSystemTimeSource;
     int64_t timeStartUs = ts_st->getRealTimeUs();
 
@@ -756,8 +920,9 @@
             // locations, we'll "pause" the audio source, causing it to
             // stop reading input data until a subsequent seek.
 
-            if (mAudioPlayer != NULL) {
+            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                 mAudioPlayer->pause();
+                mFlags &= ~AUDIO_RUNNING;
             }
             mAudioSource->pause();
         }
@@ -789,8 +954,9 @@
                     if (mVideoRenderer != NULL) {
                         mVideoRendererIsPreview = false;
                         err = initRenderer_l();
-                           if ( err != OK )
-                        postStreamDoneEvent_l(err); // santosh
+                        if (err != OK) {
+                            postStreamDoneEvent_l(err);
+                        }
 
                     }
                     continue;
@@ -803,11 +969,11 @@
                 finishSeekIfNecessary(-1);
                 LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                 mFlags |= VIDEO_AT_EOS;
-                if (mOverlayUpdateEventPosted) {
-                    mOverlayUpdateEventPosted = false;
-                    postOverlayUpdateEvent_l();
-                }
+                mFlags |= AUDIO_AT_EOS;
+                mOverlayUpdateEventPosted = false;
                 postStreamDoneEvent_l(err);
+                // Set the last decoded timestamp to duration
+                mDecodedVideoTs = (mPlayEndTimeMsec*1000);
                 return;
             }
 
@@ -823,14 +989,23 @@
             int64_t videoTimeUs;
             CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
 
-            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
-                // Frames are before begin cut time
-                // Donot render
-                mVideoBuffer->release();
-                mVideoBuffer = NULL;
-                continue;
+            if (mSeeking != NO_SEEK) {
+                if (videoTimeUs < mSeekTimeUs) {
+                    // buffers are before seek time
+                    // ignore them
+                    mVideoBuffer->release();
+                    mVideoBuffer = NULL;
+                    continue;
+                }
+            } else {
+                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
+                    // Frames are before begin cut time
+                    // Do not render
+                    mVideoBuffer->release();
+                    mVideoBuffer = NULL;
+                    continue;
+                }
             }
-
             break;
         }
     }
@@ -845,7 +1020,6 @@
         mVideoTimeUs = timeUs;
     }
 
-    mDecodedVideoTs = timeUs;
 
     if(!mStartNextPlayer) {
         int64_t playbackTimeRemaining = (mPlayEndTimeMsec*1000) - timeUs;
@@ -860,6 +1034,13 @@
 
     SeekType wasSeeking = mSeeking;
     finishSeekIfNecessary(timeUs);
+    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
+        status_t err = startAudioPlayer_l();
+        if (err != OK) {
+            LOGE("Starting the audio player failed w/ err %d", err);
+            return;
+        }
+    }
 
     TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
 
@@ -917,8 +1098,9 @@
         mVideoRendererIsPreview = false;
 
         status_t err = initRenderer_l();
-        if ( err != OK )
-        postStreamDoneEvent_l(err); // santosh
+        if (err != OK) {
+            postStreamDoneEvent_l(err);
+        }
     }
 
     // If timestamp exceeds endCutTime of clip, donot render
@@ -932,13 +1114,12 @@
         mFlags |= VIDEO_AT_EOS;
         mFlags |= AUDIO_AT_EOS;
         LOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
-        if (mOverlayUpdateEventPosted) {
-            mOverlayUpdateEventPosted = false;
-            postOverlayUpdateEvent_l();
-        }
+        mOverlayUpdateEventPosted = false;
         postStreamDoneEvent_l(ERROR_END_OF_STREAM);
         return;
     }
+    // Capture the frame timestamp to be rendered
+    mDecodedVideoTs = timeUs;
 
     // Post processing to apply video effects
     for(i=0;i<mNumberEffects;i++) {
@@ -956,7 +1137,6 @@
             ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
              (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
               && (mEffectsSettings[i].uiDuration != 0)) {
-
             setVideoPostProcessingNode(
              mEffectsSettings[i].VideoEffectType, TRUE);
         }
@@ -970,7 +1150,6 @@
     if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
         mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
         if (!mOverlayUpdateEventPosted) {
-
             // Find the effect in effectSettings array
             int index;
             for (index = 0; index < mNumberEffects; index++) {
@@ -1053,13 +1232,15 @@
         LOGV("PreviewPlayer: onVideoEvent EOS.");
         mFlags |= VIDEO_AT_EOS;
         mFlags |= AUDIO_AT_EOS;
-        if (mOverlayUpdateEventPosted) {
-            mOverlayUpdateEventPosted = false;
-            postOverlayUpdateEvent_l();
-        }
+        mOverlayUpdateEventPosted = false;
         postStreamDoneEvent_l(ERROR_END_OF_STREAM);
     }
     else {
+        if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
+            mFlags &= ~SEEK_PREVIEW;
+            return;
+        }
+
         if(!mIsVideoSourceJpg) {
             postVideoEvent_l(0);
         }
@@ -1776,13 +1957,15 @@
                     if (mVideoRenderer != NULL) {
                         mVideoRendererIsPreview = false;
                         err = initRenderer_l();
-                        if ( err != OK )
-                                postStreamDoneEvent_l(err); // santosh
+                        if (err != OK) {
+                            postStreamDoneEvent_l(err);
+                        }
                     }
                     continue;
                 }
                 LOGV("PreviewPlayer: onVideoEvent EOS reached.");
                 mFlags |= VIDEO_AT_EOS;
+                mFlags |= AUDIO_AT_EOS;
                 postStreamDoneEvent_l(err);
                 return OK;
             }
@@ -1798,15 +1981,23 @@
 
             int64_t videoTimeUs;
             CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
-
-            if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
-                // buffers are before begin cut time
-                // ignore them
-                mVideoBuffer->release();
-                mVideoBuffer = NULL;
-                continue;
+            if (mSeeking != NO_SEEK) {
+                if (videoTimeUs < mSeekTimeUs) {
+                    // buffers are before seek time
+                    // ignore them
+                    mVideoBuffer->release();
+                    mVideoBuffer = NULL;
+                    continue;
+                }
+            } else {
+                if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
+                    // buffers are before begin cut time
+                    // ignore them
+                    mVideoBuffer->release();
+                    mVideoBuffer = NULL;
+                    continue;
+                }
             }
-
             break;
         }
     }
@@ -1825,4 +2016,9 @@
 
 }
 
+status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
+    *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
+    return OK;
+}
+
 }  // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
index 564c014..0800115 100644
--- a/libvideoeditor/lvpp/PreviewPlayer.h
+++ b/libvideoeditor/lvpp/PreviewPlayer.h
@@ -92,7 +92,8 @@
     status_t resetJniCallbackTimeStamp();
     status_t setImageClipProperties(uint32_t width, uint32_t height);
     status_t readFirstVideoFrame();
-
+    status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
+    status_t setAudioPlayer(AudioPlayer *audioPlayer);
 
 private:
     friend struct PreviewPlayerEvent;
@@ -118,16 +119,18 @@
     status_t setDataSource_l(
         const char *uri, const KeyedVector<String8, String8> *headers);
     void reset_l();
-    void partial_reset_l();
     status_t play_l();
     status_t initRenderer_l();
     status_t initAudioDecoder();
     status_t initVideoDecoder(uint32_t flags = 0);
     void onVideoEvent();
+    void onStreamDone();
     status_t finishSetDataSource_l();
     static bool ContinuePreparation(void *cookie);
     void onPrepareAsyncEvent();
     void finishAsyncPrepare_l();
+    status_t startAudioPlayer_l();
+    bool mIsChangeSourceRequired;
 
     sp<PreviewPlayerRenderer> mVideoRenderer;
 
@@ -218,6 +221,8 @@
     status_t prepare_l();
     status_t prepareAsync_l();
 
+    VideoEditorAudioPlayer  *mVeAudioPlayer;
+
     PreviewPlayer(const PreviewPlayer &);
     PreviewPlayer &operator=(const PreviewPlayer &);
 };
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
index ed9de6e..f33bd0a 100755
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
@@ -38,6 +38,7 @@
 
     LOGV("VideoEditorAudioPlayer");
     mBGAudioPCMFileHandle = NULL;
+    mAudioProcess = NULL;
     mBGAudioPCMFileLength = 0;
     mBGAudioPCMFileTrimmedLength = 0;
     mBGAudioPCMFileDuration = 0;
@@ -47,6 +48,7 @@
     mBGAudioStoryBoardCurrentMediaBeginCutTS = 0;
     mBGAudioStoryBoardCurrentMediaVolumeVal = 0;
     mSeekTimeUs = 0;
+    mSource = NULL;
 }
 
 VideoEditorAudioPlayer::~VideoEditorAudioPlayer() {
@@ -55,10 +57,35 @@
     if (mStarted) {
         reset();
     }
+    if (mAudioProcess != NULL) {
+        delete mAudioProcess;
+        mAudioProcess = NULL;
+    }
+}
+void VideoEditorAudioPlayer::setSource(const sp<MediaSource> &source) {
+    Mutex::Autolock autoLock(mLock);
+    mSource = source;
+    mReachedEOS = false;
+}
+
+sp<MediaSource> VideoEditorAudioPlayer::getSource() {
+    Mutex::Autolock autoLock(mLock);
+    return mSource;
+}
+
+void VideoEditorAudioPlayer::setObserver(AwesomePlayer *observer) {
+    LOGV("setObserver");
+    //CHECK(!mStarted);
+    mObserver = observer;
+}
+
+
+bool VideoEditorAudioPlayer::isStarted() {
+    return mStarted;
 }
 
 status_t VideoEditorAudioPlayer::start(bool sourceAlreadyStarted) {
-
+    Mutex::Autolock autoLock(mLock);
     CHECK(!mStarted);
     CHECK(mSource != NULL);
     LOGV("Start");
@@ -80,11 +107,16 @@
     veAudMixSettings audioMixSettings;
 
     // Pass on the audio ducking parameters
-    audioMixSettings.lvInDucking_threshold = mAudioMixSettings->uiInDucking_threshold;
-    audioMixSettings.lvInDucking_lowVolume = ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
-    audioMixSettings.lvInDucking_enable = mAudioMixSettings->bInDucking_enable;
-    audioMixSettings.lvPTVolLevel = ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
-    audioMixSettings.lvBTVolLevel = ((M4OSA_Float)mAudioMixSettings->uiAddVolume) /100.0;
+    audioMixSettings.lvInDucking_threshold =
+        mAudioMixSettings->uiInDucking_threshold;
+    audioMixSettings.lvInDucking_lowVolume =
+        ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
+    audioMixSettings.lvInDucking_enable =
+        mAudioMixSettings->bInDucking_enable;
+    audioMixSettings.lvPTVolLevel =
+        ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
+    audioMixSettings.lvBTVolLevel =
+        ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
     audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
     audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
 
@@ -301,6 +333,33 @@
     return OK;
 }
 
+void VideoEditorAudioPlayer::resume() {
+
+    veAudMixSettings audioMixSettings;
+
+    // Single audio player is used;
+    // Pass on the audio ducking parameters
+    // which might have changed with new audio source
+    audioMixSettings.lvInDucking_threshold =
+        mAudioMixSettings->uiInDucking_threshold;
+    audioMixSettings.lvInDucking_lowVolume =
+        ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
+    audioMixSettings.lvInDucking_enable =
+        mAudioMixSettings->bInDucking_enable;
+    audioMixSettings.lvPTVolLevel =
+        ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
+    audioMixSettings.lvBTVolLevel =
+        ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
+    audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
+    audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
+
+    // Call to Audio mix param setting
+    mAudioProcess->veSetAudioProcessingParams(audioMixSettings);
+
+    //Call the base class
+    AudioPlayer::resume();
+}
+
 void VideoEditorAudioPlayer::reset() {
 
     LOGV("reset");
@@ -331,9 +390,9 @@
     size_t size_remaining = size;
 
     M4OSA_ERR err = M4NO_ERROR;
-    M4AM_Buffer bgFrame = {NULL, 0};
-    M4AM_Buffer mixFrame = {NULL, 0};
-    M4AM_Buffer ptFrame = {NULL, 0};
+    M4AM_Buffer16 bgFrame = {NULL, 0};
+    M4AM_Buffer16 mixFrame = {NULL, 0};
+    M4AM_Buffer16 ptFrame = {NULL, 0};
     int64_t currentSteamTS = 0;
     int64_t startTimeForBT = 0;
     M4OSA_Float fPTVolLevel =
@@ -379,7 +438,11 @@
 
                 mIsFirstBuffer = false;
             } else {
-                status = mSource->read(&mInputBuffer, &options);
+
+                {
+                    Mutex::Autolock autoLock(mLock);
+                    status = mSource->read(&mInputBuffer, &options);
+                }
                 // Data is Primary Track, mix with background track
                 // after reading same size from Background track PCM file
                 if (status == OK)
@@ -390,9 +453,9 @@
                           (int64_t)(mAudioMixSettings->uiAddCts * 1000)) {
 
                         LOGV("VideoEditorAudioPlayer::INSIDE MIXING");
-                        LOGV("Checking %lld <= %lld - %d",
+                        LOGV("Checking %lld <= %lld",
                             mBGAudioPCMFileSeekPoint-mBGAudioPCMFileOriginalSeekPoint,
-                            mBGAudioPCMFileTrimmedLength, len);
+                            mBGAudioPCMFileTrimmedLength);
 
 
                         M4OSA_Void* ptr;
@@ -403,8 +466,8 @@
                         M4OSA_Context fp = M4OSA_NULL;
 
                         uiPCMsize = (mInputBuffer->range_length())/2;
-                        pPTMdata = (M4OSA_Int16*)(mInputBuffer->data() +
-                        mInputBuffer->range_offset());
+                        pPTMdata = (M4OSA_Int16*) ((uint8_t*) mInputBuffer->data()
+                                + mInputBuffer->range_offset());
 
                         LOGV("mix with background malloc to do len %d", len);
 
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
index 92b77b6..f5232cf 100755
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
@@ -49,6 +49,7 @@
     virtual ~VideoEditorAudioPlayer();

 

     status_t start(bool sourceAlreadyStarted = false);

+    void resume();

 

     void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);

     void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);

@@ -57,6 +58,11 @@
         M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,

         M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);

 

+    void setObserver(AwesomePlayer *observer);

+    void setSource(const sp<MediaSource> &source);

+    sp<MediaSource> getSource();

+

+    bool isStarted();

 private:

 

     M4xVSS_AudioMixingSettings *mAudioMixSettings;

diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
index 2049f08..28208d1 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
@@ -50,9 +50,9 @@
 M4OSA_Int32 VideoEditorBGAudioProcessing::veProcessAudioMixNDuck(

         void *pPTBuffer, void *pBTBuffer, void *pOutBuffer) {

 

-    M4AM_Buffer* pPrimaryTrack   = (M4AM_Buffer*)pPTBuffer;

-    M4AM_Buffer* pBackgroundTrack = (M4AM_Buffer*)pBTBuffer;

-    M4AM_Buffer* pMixedOutBuffer  = (M4AM_Buffer*)pOutBuffer;

+    M4AM_Buffer16* pPrimaryTrack   = (M4AM_Buffer16*)pPTBuffer;

+    M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)pBTBuffer;

+    M4AM_Buffer16* pMixedOutBuffer  = (M4AM_Buffer16*)pOutBuffer;

 

     LOGV("VideoEditorBGAudioProcessing::lvProcessAudioMixNDuck \

      pPTBuffer 0x%x pBTBuffer 0x%x pOutBuffer 0x%x", pPTBuffer,

diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
index 851a133..24586a7 100755
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
+++ b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
@@ -31,7 +31,7 @@
 typedef struct {

     M4OSA_UInt16*   m_dataAddress; // Android SRC needs a Int16 pointer

     M4OSA_UInt32    m_bufferSize;

-} M4AM_Buffer;

+} M4AM_Buffer16;    // Structure contains Int16_t pointer

 

 // Following struct will be used by app to supply the PT and BT properties

 // along with ducking values

@@ -56,7 +56,6 @@
     void veSetAudioProcessingParams(veAudMixSettings mixParams);

     M4OSA_Int32 veProcessAudioMixNDuck(void* , void *, void *);

 

-protected:

     ~VideoEditorBGAudioProcessing();

 

 private:

@@ -79,9 +78,9 @@
     M4OSA_Float mPTVolLevel;

     M4OSA_Float mBTVolLevel;

 

-    M4AM_Buffer mPTBuffer;

-    M4AM_Buffer mBTBuffer;

-    M4AM_Buffer mOutMixBuffer;

+    M4AM_Buffer16 mPTBuffer;

+    M4AM_Buffer16 mBTBuffer;

+    M4AM_Buffer16 mOutMixBuffer;

     M4OSA_Int16 *mTempBuffer;

     M4OSA_Int32 mTempFrameCount;

 

diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
index 46b18d9..fe99472 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -49,13 +49,16 @@
     return OK;

 }

 

+

+status_t VideoEditorPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {

+    return mPlayer->setAudioPlayer(audioPlayer);

+}

+

+

 status_t VideoEditorPlayer::setDataSource(

         const char *url, const KeyedVector<String8, String8> *headers) {

     LOGI("setDataSource('%s')", url);

 

-    mVeAudioSink = new VeAudioOutput();

-    mPlayer->setAudioSink(mVeAudioSink);

-

     return mPlayer->setDataSource(url, headers);

 }

 

@@ -66,6 +69,7 @@
     return (!OK);

 }

 

+// needed?

 status_t VideoEditorPlayer::setVideoISurface(const sp<ISurface> &surface) {

     LOGV("setVideoISurface");

 

@@ -80,6 +84,13 @@
     return OK;

 }

 

+status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {

+    LOGV("setVideoSurfaceTexture");

+

+    mPlayer->setSurfaceTexture(surfaceTexture);

+    return OK;

+}

+

 status_t VideoEditorPlayer::prepare() {

     LOGV("prepare");

     return mPlayer->prepare();

@@ -277,6 +288,11 @@
     return mPlayer->readFirstVideoFrame();

 }

 

+status_t VideoEditorPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {

+    mPlayer->getLastRenderedTimeMs(lastRenderedTimeMs);

+    return NO_ERROR;

+}

+

 /* Implementation of AudioSink interface */

 #undef LOG_TAG

 #define LOG_TAG "VeAudioSink"

@@ -304,7 +320,7 @@
 void VideoEditorPlayer::VeAudioOutput::setMinBufferCount() {

 

     mIsOnEmulator = false;

-    mMinBufferCount =12;

+    mMinBufferCount = 4;

 }

 

 bool VideoEditorPlayer::VeAudioOutput::isOnEmulator() {

@@ -368,8 +384,8 @@
 

     // Check argument "bufferCount" against the mininum buffer count

     if (bufferCount < mMinBufferCount) {

-        LOGD("bufferCount (%d) is too small and increased to %d",

-         bufferCount, mMinBufferCount);

+        LOGV("bufferCount (%d) is too small and increased to %d",

+            bufferCount, mMinBufferCount);

         bufferCount = mMinBufferCount;

 

     }

diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index 47d174d..1febef4 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -23,14 +23,14 @@
 #include "M4xVSS_API.h"

 #include "VideoEditorMain.h"

 #include "VideoEditorTools.h"

-

+#include "VideoEditorAudioPlayer.h"

 

 namespace android {

 

 struct PreviewPlayer;

 

 class VideoEditorPlayer : public MediaPlayerInterface {

-

+    public:

     class VeAudioOutput: public MediaPlayerBase::AudioSink

     {

     public:

@@ -97,8 +97,10 @@
             const char *url, const KeyedVector<String8, String8> *headers);

 

     virtual status_t setDataSource(int fd, int64_t offset, int64_t length);

+    // needed?

     virtual status_t setVideoISurface(const sp<ISurface> &surface);

     virtual status_t setVideoSurface(const sp<Surface> &surface);

+    virtual status_t setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);

     virtual status_t prepare();

     virtual status_t prepareAsync();

     virtual status_t start();

@@ -142,8 +144,9 @@
     virtual status_t resetJniCallbackTimeStamp();

     virtual status_t setImageClipProperties(uint32_t width, uint32_t height);

     virtual status_t readFirstVideoFrame();

+    virtual status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);

 

-

+    status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);

 private:

     PreviewPlayer       *mPlayer;

     sp<VeAudioOutput>    mVeAudioSink;

diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
index 78f0cda..180f75e 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -25,6 +25,7 @@
 

 VideoEditorPreviewController::VideoEditorPreviewController()

     : mCurrentPlayer(0),

+      mActivePlayerIndex(0),

       mThreadContext(NULL),

       mPlayerState(VePlayerIdle),

       mPrepareReqest(M4OSA_FALSE),

@@ -50,6 +51,7 @@
       mOutputVideoWidth(0),

       mOutputVideoHeight(0),

       bStopThreadInProgress(false),

+      mOverlayState(OVERLAY_CLEAR),

       mSemThreadWait(NULL) {

     LOGV("VideoEditorPreviewController");

     mRenderingMode = M4xVSS_kBlackBorders;

@@ -128,11 +130,18 @@
         mAudioMixPCMFileHandle = M4OSA_NULL;

     }

 

+    if (mBackgroundAudioSetting != NULL) {

+        M4OSA_free((M4OSA_MemAddr32)mBackgroundAudioSetting);

+        mBackgroundAudioSetting = NULL;

+    }

+

     if(mTarget != NULL) {

         delete mTarget;

         mTarget = NULL;

     }

 

+    mOverlayState = OVERLAY_CLEAR;

+

     LOGV("~VideoEditorPreviewController returns");

 }

 

@@ -452,6 +461,13 @@
         mTarget = NULL;

     }

 

+    // Create Audio player to be used for entire

+    // storyboard duration

+    mVEAudioSink = new VideoEditorPlayer::VeAudioOutput();

+    mVEAudioPlayer = new VideoEditorAudioPlayer(mVEAudioSink);

+    mVEAudioPlayer->setAudioMixSettings(mBackgroundAudioSetting);

+    mVEAudioPlayer->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);

+

     LOGV("startPreview: loop = %d", loop);

     mPreviewLooping = loop;

 

@@ -542,6 +558,7 @@
 

     // Start playing with player instance 0

     mCurrentPlayer = 0;

+    mActivePlayerIndex = 0;

 

     if(toMs == -1) {

         LOGV("startPreview: Preview till end of storyboard");

@@ -614,8 +631,9 @@
     return M4NO_ERROR;

 }

 

-M4OSA_ERR VideoEditorPreviewController::stopPreview() {

+M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {

     M4OSA_ERR err = M4NO_ERROR;

+    uint32_t lastRenderedFrameTimeMs = 0;

     LOGV("stopPreview");

 

     // Stop the thread

@@ -650,12 +668,22 @@
                 LOGV("stop the player first");

                 mVePlayer[playerInst]->stop();

             }

+            if (playerInst == mActivePlayerIndex) {

+                // Return the last rendered frame time stamp

+                mVePlayer[mActivePlayerIndex]->getLastRenderedTimeMs(&lastRenderedFrameTimeMs);

+            }

 

             LOGV("stopPreview: clearing mVePlayer");

             mVePlayer[playerInst].clear();

             mVePlayer[playerInst] = NULL;

         }

     }

+    LOGV("stopPreview: clear audioSink and audioPlayer");

+    mVEAudioSink.clear();

+    if (mVEAudioPlayer) {

+        delete mVEAudioPlayer;

+        mVEAudioPlayer = NULL;

+    }

 

     // If image file playing, then free the buffer pointer

     if(mFrameStr.pBuffer != M4OSA_NULL) {

@@ -677,7 +705,8 @@
     mOutputVideoWidth = 0;

     mOutputVideoHeight = 0;

 

-    return M4NO_ERROR;

+    LOGV("stopPreview() lastRenderedFrameTimeMs %u", lastRenderedFrameTimeMs);

+    return lastRenderedFrameTimeMs;

 }

 

 M4OSA_ERR VideoEditorPreviewController::clearSurface(

@@ -956,6 +985,7 @@
         LOGV("preparePlayer: seekTo(%d)",

          pController->mClipList[index]->uiBeginCutTime);

     }

+    pController->mVePlayer[playerInstance]->setAudioPlayer(pController->mVEAudioPlayer);

 

     pController->mVePlayer[playerInstance]->readFirstVideoFrame();

     LOGV("preparePlayer: readFirstVideoFrame of clip");

@@ -1057,6 +1087,8 @@
              pController->mClipList[index]->uiBeginCutTime,

              pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);

         }

+        // Capture the active player being used

+        pController->mActivePlayerIndex = pController->mCurrentPlayer;

 

         pController->mVePlayer[pController->mCurrentPlayer]->start();

         LOGV("threadProc: started");

@@ -1123,6 +1155,21 @@
                  pController->mJniCookie, MSG_TYPE_PROGRESS_INDICATION,

                  &playedDuration);

 

+            if ((pController->mOverlayState == OVERLAY_UPDATE) &&

+                (pController->mCurrentClipNumber !=

+                (pController->mNumberClipsToPreview-1))) {

+                VideoEditorCurretEditInfo *pEditInfo =

+                    (VideoEditorCurretEditInfo*)M4OSA_malloc(sizeof(VideoEditorCurretEditInfo),

+                    M4VS, (M4OSA_Char*)"Current Edit info");

+                pEditInfo->overlaySettingsIndex = ext2;

+                pEditInfo->clipIndex = pController->mCurrentClipNumber;

+                pController->mOverlayState = OVERLAY_CLEAR;

+                if (pController->mJniCallback != NULL) {

+                        pController->mJniCallback(pController->mJniCookie,

+                            MSG_TYPE_OVERLAY_CLEAR, pEditInfo);

+                }

+                M4OSA_free((M4OSA_MemAddr32)pEditInfo);

+            }

             M4OSA_semaphorePost(pController->mSemThreadWait);

             break;

         }

@@ -1189,9 +1236,11 @@
             LOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);

             if (pController->mJniCallback != NULL) {

                 if (ext1 == 1) {

+                    pController->mOverlayState = OVERLAY_UPDATE;

                     pController->mJniCallback(pController->mJniCookie,

                         MSG_TYPE_OVERLAY_UPDATE, pEditInfo);

                 } else {

+                    pController->mOverlayState = OVERLAY_CLEAR;

                     pController->mJniCallback(pController->mJniCookie,

                         MSG_TYPE_OVERLAY_CLEAR, pEditInfo);

                 }

diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
index 2a2a665..e9e4e84 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.h
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.h
@@ -20,6 +20,7 @@
 

 #include <utils/Log.h>

 #include "VideoEditorPlayer.h"

+#include "VideoEditorAudioPlayer.h"

 #include "M4OSA_Semaphore.h"

 #include "M4OSA_Thread.h"

 #include "M4OSA_Clock.h"

@@ -51,6 +52,10 @@
     VePlayerAutoStop

 } VePlayerState;

 

+typedef enum {

+    OVERLAY_UPDATE = 0,

+    OVERLAY_CLEAR

+} OverlayState;

 

 // Callback mechanism from PreviewController to Jni  */

 typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, void *argc);

@@ -70,7 +75,7 @@
     M4OSA_ERR startPreview(M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,

         M4OSA_UInt16 callBackAfterFrameCount, M4OSA_Bool loop) ;

 

-    M4OSA_ERR stopPreview();

+    M4OSA_UInt32 stopPreview();

 

     M4OSA_ERR renderPreviewFrame(const sp<Surface> &surface,

         VideoEditor_renderPreviewFrameStr* pFrameInfo,

@@ -117,6 +122,8 @@
     M4OSA_UInt32 mFirstPreviewClipBeginTime;

     M4OSA_UInt32 mLastPreviewClipEndTime;

     M4OSA_UInt32 mVideoStoryBoardTimeMsUptoFirstPreviewClip;

+    OverlayState mOverlayState;

+    int mActivePlayerIndex;

 

     M4xVSS_MediaRendering mRenderingMode;

     uint32_t mOutputVideoWidth;

@@ -125,6 +132,9 @@
     M4OSA_Context mSemThreadWait;

     bool mIsFiftiesEffectStarted;

 

+    sp<VideoEditorPlayer::VeAudioOutput> mVEAudioSink;

+    VideoEditorAudioPlayer *mVEAudioPlayer;

+

     M4VIFI_UInt8*  mFrameRGBBuffer;

     M4VIFI_UInt8*  mFrameYUVBuffer;

     static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);

diff --git a/libvideoeditor/lvpp/VideoEditorSRC.cpp b/libvideoeditor/lvpp/VideoEditorSRC.cpp
index 091bdfb..0990ad5 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.cpp
+++ b/libvideoeditor/lvpp/VideoEditorSRC.cpp
@@ -46,6 +46,7 @@
     mSeekTimeUs = -1;

     mLeftover = 0;

     mLastReadSize = 0;

+    mReSampledBuffer = NULL;

 #ifndef FROYO

     mSeekMode =  ReadOptions::SEEK_PREVIOUS_SYNC;

 #endif

@@ -168,6 +169,10 @@
     mAccuOutBufferSize  = 0;

     mLeftover = 0;

     mLastReadSize = 0;

+    if (mReSampledBuffer != NULL) {

+        free(mReSampledBuffer);

+        mReSampledBuffer = NULL;

+    }

 

     return OK;

 }

@@ -223,15 +228,21 @@
         memset(pTmpBuffer, 0x00, outFrameCnt * 2 * sizeof(int32_t));

         // Resample to target quality

         mResampler->resample(pTmpBuffer, outFrameCnt, this);

-        int16_t *reSampledBuffer = (int16_t*)malloc(outBufferSize);

-        memset(reSampledBuffer, 0x00, outBufferSize);

+

+        // Free previous allocation

+        if (mReSampledBuffer != NULL) {

+            free(mReSampledBuffer);

+            mReSampledBuffer = NULL;

+        }

+        mReSampledBuffer = (int16_t*)malloc(outBufferSize);

+        memset(mReSampledBuffer, 0x00, outBufferSize);

 

         // Convert back to 16 bits

-        AudioMixer::ditherAndClamp((int32_t*)reSampledBuffer, pTmpBuffer, outFrameCnt);

+        AudioMixer::ditherAndClamp((int32_t*)mReSampledBuffer, pTmpBuffer, outFrameCnt);

         LOGV("Resampled buffer size %d", outFrameCnt* 2 * sizeof(int16_t));

 

         // Create new MediaBuffer

-        mCopyBuffer = new MediaBuffer((void*)reSampledBuffer, outBufferSize);

+        mCopyBuffer = new MediaBuffer((void*)mReSampledBuffer, outBufferSize);

 

         // Compute and set the new timestamp

         sp<MetaData> to = mCopyBuffer->meta_data();

@@ -325,7 +336,9 @@
         }

         else {

             //copy the buffer

-            memcpy((uint8_t*) (pInterframeBuffer + mInterframeBufferPosition), (uint8_t*) (aBuffer->data() + aBuffer->range_offset()), aBuffer->range_length());

+            memcpy(((uint8_t*) pInterframeBuffer) + mInterframeBufferPosition,

+                    ((uint8_t*) aBuffer->data()) + aBuffer->range_offset(),

+                    aBuffer->range_length());

             LOGV("Read from buffer  %d", aBuffer->range_length());

 

             mInterframeBufferPosition += aBuffer->range_length();

diff --git a/libvideoeditor/lvpp/VideoEditorSRC.h b/libvideoeditor/lvpp/VideoEditorSRC.h
index 3c557a4..50da23f 100755
--- a/libvideoeditor/lvpp/VideoEditorSRC.h
+++ b/libvideoeditor/lvpp/VideoEditorSRC.h
@@ -15,6 +15,8 @@
  * limitations under the License.

  */

 

+#include <stdint.h>

+

 #include <utils/RefBase.h>

 #include <media/stagefright/MediaErrors.h>

 

@@ -98,6 +100,7 @@
 

         int64_t mSeekTimeUs;

         ReadOptions::SeekMode mSeekMode;

+        int16_t *mReSampledBuffer;

 

 };

 

diff --git a/libvideoeditor/lvpp/VideoEditorTools.cpp b/libvideoeditor/lvpp/VideoEditorTools.cpp
index 3c3a7b8..ab3aee1 100755
--- a/libvideoeditor/lvpp/VideoEditorTools.cpp
+++ b/libvideoeditor/lvpp/VideoEditorTools.cpp
@@ -1610,7 +1610,7 @@
                                                                 M4VIFI_ImagePlane *pPlaneIn,

                                                                 M4VIFI_ImagePlane *pPlaneOut)

 {

-    M4VIFI_UInt8    *pu8_data_in, *pu8_data_out;

+    M4VIFI_UInt8    *pu8_data_in, *pu8_data_out, *pu8dum;

     M4VIFI_UInt32   u32_plane;

     M4VIFI_UInt32   u32_width_in, u32_width_out, u32_height_in, u32_height_out;

     M4VIFI_UInt32   u32_stride_in, u32_stride_out;

@@ -1623,19 +1623,29 @@
     M4VIFI_UInt8    *pu8_src_top;

     M4VIFI_UInt8    *pu8_src_bottom;

 

-    if ( (pPlaneIn[0].u_height == pPlaneOut[0].u_height) && (pPlaneIn[0].u_width == pPlaneOut[0].u_width))

+    M4VIFI_UInt8    u8Wflag = 0;

+    M4VIFI_UInt8    u8Hflag = 0;

+    M4VIFI_UInt32   loop = 0;

+

+

+    /*

+     If input width is equal to output width and input height equal to

+     output height then M4VIFI_YUV420toYUV420 is called.

+    */

+    if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) &&

+              (pPlaneIn[0].u_width == pPlaneOut[0].u_width))

     {

         return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut);

     }

 

     /* Check for the YUV width and height are even */

-    if( (IS_EVEN(pPlaneIn[0].u_height) == FALSE)    ||

+    if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE)    ||

         (IS_EVEN(pPlaneOut[0].u_height) == FALSE))

     {

         return M4VIFI_ILLEGAL_FRAME_HEIGHT;

     }

 

-    if( (IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||

+    if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||

         (IS_EVEN(pPlaneOut[0].u_width) == FALSE))

     {

         return M4VIFI_ILLEGAL_FRAME_WIDTH;

@@ -1659,6 +1669,16 @@
         u32_width_out   = pPlaneOut[u32_plane].u_width;

         u32_height_out  = pPlaneOut[u32_plane].u_height;

 

+        /*

+        For the case , width_out = width_in , set the flag to avoid

+        accessing one column beyond the input width. In this case the last

+        column is replicated for processing

+        */

+        if (u32_width_out == u32_width_in) {

+            u32_width_out = u32_width_out-1;

+            u8Wflag = 1;

+        }

+

         /* Compute horizontal ratio between src and destination width.*/

         if (u32_width_out >= u32_width_in)

         {

@@ -1669,6 +1689,16 @@
             u32_x_inc   = (u32_width_in * MAX_SHORT) / (u32_width_out);

         }

 

+        /*

+        For the case , height_out = height_in , set the flag to avoid

+        accessing one row beyond the input height. In this case the last

+        row is replicated for processing

+        */

+        if (u32_height_out == u32_height_in) {

+            u32_height_out = u32_height_out-1;

+            u8Hflag = 1;

+        }

+

         /* Compute vertical ratio between src and destination height.*/

         if (u32_height_out >= u32_height_in)

         {

@@ -1681,14 +1711,15 @@
 

         /*

         Calculate initial accumulator value : u32_y_accum_start.

-        u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

+        u32_y_accum_start is coded on 15 bits, and represents a value

+        between 0 and 0.5

         */

         if (u32_y_inc >= MAX_SHORT)

         {

-            /*

-                Keep the fractionnal part, assimung that integer  part is coded

-                on the 16 high bits and the fractionnal on the 15 low bits

-            */

+        /*

+        Keep the fractional part, assuming that integer part is coded

+        on the 16 high bits and the fractional on the 15 low bits

+        */

             u32_y_accum = u32_y_inc & 0xffff;

 

             if (!u32_y_accum)

@@ -1705,8 +1736,9 @@
 

 

         /*

-            Calculate initial accumulator value : u32_x_accum_start.

-            u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5

+        Calculate initial accumulator value : u32_x_accum_start.

+        u32_x_accum_start is coded on 15 bits, and represents a value

+        between 0 and 0.5

         */

         if (u32_x_inc >= MAX_SHORT)

         {

@@ -1727,12 +1759,14 @@
         u32_height = u32_height_out;

 

         /*

-        Bilinear interpolation linearly interpolates along each row, and then uses that

-        result in a linear interpolation donw each column. Each estimated pixel in the

-        output image is a weighted combination of its four neighbours according to the formula:

-        F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+f(p+&,q+1)R(1-a)R(b-1)

-        with  R(x) = / x+1  -1 =< x =< 0 \ 1-x  0 =< x =< 1 and a (resp. b)weighting coefficient

-        is the distance from the nearest neighbor in the p (resp. q) direction

+        Bilinear interpolation linearly interpolates along each row, and

+        then uses that result in a linear interpolation down each column.

+        Each estimated pixel in the output image is a weighted combination

+        of its four neighbours according to the formula:

+        F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+

+        f(p+&,q+1)R(1-a)R(b-1) with  R(x) = / x+1  -1 =< x =< 0 \ 1-x

+        0 =< x =< 1 and a (resp. b)weighting coefficient is the distance

+        from the nearest neighbor in the p (resp. q) direction

         */

 

         do { /* Scan all the row */

@@ -1762,6 +1796,16 @@
                 u32_x_accum += u32_x_inc;

             } while(--u32_width);

 

+            /*

+               This u8Wflag flag comes into effect if input and output

+               width is same, and height may be different. So previous

+               pixel is replicated here

+            */

+            if (u8Wflag) {

+                *pu8_data_out = (M4VIFI_UInt8)u32_temp_value;

+            }

+

+            pu8dum = (pu8_data_out-u32_width_out);

             pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;

 

             /* Update vertical accumulator */

@@ -1771,6 +1815,17 @@
                 u32_y_accum &= 0xffff;

             }

         } while(--u32_height);

+

+        /*

+        This u8Hflag flag comes into effect if input and output height

+        is same, and width may be different. So previous pixel row is

+        replicated here

+        */

+        if (u8Hflag) {

+            for(loop =0; loop < (u32_width_out+u8Wflag); loop++) {

+                *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++;

+            }

+        }

     }

 

     return M4VIFI_OK;

@@ -2030,6 +2085,11 @@
                 }

             }

         }

+

+        if (m_air_context != M4OSA_NULL) {

+            M4AIR_cleanUp(m_air_context);

+            m_air_context = M4OSA_NULL;

+        }

     }

 

     return err;

diff --git a/libvideoeditor/osal/inc/M4OSA_Time.h b/libvideoeditor/osal/inc/M4OSA_Time.h
index 984c243..7545541 100755
--- a/libvideoeditor/osal/inc/M4OSA_Time.h
+++ b/libvideoeditor/osal/inc/M4OSA_Time.h
@@ -33,7 +33,7 @@
 #include "M4OSA_Types.h"
 
 
-typedef M4OSA_Int64   M4OSA_Time;
+typedef signed long long  M4OSA_Time;
 
 
 /** This macro sets the unknown time value */
diff --git a/libvideoeditor/osal/src/LVOSA_FileReader_optim.c b/libvideoeditor/osal/src/LVOSA_FileReader_optim.c
index 36541f0..4cf5fd8 100755
--- a/libvideoeditor/osal/src/LVOSA_FileReader_optim.c
+++ b/libvideoeditor/osal/src/LVOSA_FileReader_optim.c
@@ -167,7 +167,7 @@
 
     for(i=0; i<M4OSA_READBUFFER_NB; i++)
     {
-        apContext->buffer[i].data = (M4OSA_MemAddr8) M4OSA_malloc(M4OSA_READBUFFER_SIZE, M4OSA_FILE_READER, "M4OSA_FileReader_BufferInit");
+        apContext->buffer[i].data = (M4OSA_MemAddr8) M4OSA_malloc(M4OSA_READBUFFER_SIZE, M4OSA_FILE_READER, (M4OSA_Char *)"M4OSA_FileReader_BufferInit");
         M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, apContext->buffer[i].data);
     }
 
@@ -545,7 +545,7 @@
 
     /*      Allocate memory for the File reader context. */
     apContext = (M4OSA_FileReader_Context_optim *)M4OSA_malloc(sizeof(M4OSA_FileReader_Context_optim),
-                                      M4OSA_FILE_READER, "M4OSA_FileReader_Context_optim");
+                                      M4OSA_FILE_READER, (M4OSA_Char *)"M4OSA_FileReader_Context_optim");
 
     M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, apContext);
 
@@ -555,10 +555,10 @@
     /*Set the optimized functions, to be called by the user*/
 
     apContext->FS = (M4OSA_FileReadPointer*) M4OSA_malloc(sizeof(M4OSA_FileReadPointer),
-                                       M4OSA_FILE_READER, "NXPSW_FileReaderOptim_init");
+                                       M4OSA_FILE_READER, (M4OSA_Char *)"M4OSA_FileReaderOptim_init");
     if (M4OSA_NULL==apContext->FS)
     {
-        M4OSA_TRACE1_0("NXPSW_FileReaderOptim_init - ERROR : allocation failed");
+        M4OSA_TRACE1_0("M4OSA_FileReaderOptim_init - ERROR : allocation failed");
         return M4ERR_ALLOC;
     }
     apContext->FS->openRead  = M4OSA_fileReadOpen;
@@ -567,7 +567,7 @@
     apContext->FS->closeRead = M4OSA_fileReadClose;
     apContext->FS->setOption = M4OSA_fileReadSetOption;
     apContext->FS->getOption = M4OSA_fileReadGetOption;
-   #else
+#else
     apContext->FS = FS;
 #endif
 
diff --git a/libvideoeditor/osal/src/M4OSA_FileCache.c b/libvideoeditor/osal/src/M4OSA_FileCache.c
index a804123..0f66f96 100755
--- a/libvideoeditor/osal/src/M4OSA_FileCache.c
+++ b/libvideoeditor/osal/src/M4OSA_FileCache.c
@@ -431,7 +431,7 @@
         return;
     }
 
-    M4OSA_FileCache_internalQuicksort64(array,0,size-1);
+    M4OSA_FileCache_internalQuicksort64((M4OSA_Int64 * const)array, 0, size-1);
 }
 
 /* __________________________________________________________ */
diff --git a/libvideoeditor/osal/src/M4OSA_FileCommon.c b/libvideoeditor/osal/src/M4OSA_FileCommon.c
index 52fe781..d80d8cc 100755
--- a/libvideoeditor/osal/src/M4OSA_FileCommon.c
+++ b/libvideoeditor/osal/src/M4OSA_FileCommon.c
@@ -25,7 +25,7 @@
 
 #ifndef USE_STAGEFRIGHT_CODECS
 #error "USE_STAGEFRIGHT_CODECS is not defined"
-#endif USE_STAGEFRIGHT_CODECS
+#endif /*USE_STAGEFRIGHT_CODECS*/
 
 #ifdef UTF_CONVERSION
 #include <string.h>
diff --git a/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h b/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
index d68c881..79831bb 100755
--- a/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
+++ b/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
@@ -112,34 +112,38 @@
 */
 typedef enum
 {
-    M4VIDEOEDITING_kMPEG4_SP_Level_0               = 0,
-    M4VIDEOEDITING_kMPEG4_SP_Level_0b              = 1,
-    M4VIDEOEDITING_kMPEG4_SP_Level_1               = 2,
-    M4VIDEOEDITING_kMPEG4_SP_Level_2               = 3,
-    M4VIDEOEDITING_kMPEG4_SP_Level_3               = 4,
-    M4VIDEOEDITING_kH263_Profile_0_Level_10        = 5,
-    M4VIDEOEDITING_kH263_Profile_0_Level_20        = 6,
-    M4VIDEOEDITING_kH263_Profile_0_Level_30        = 7,
-    M4VIDEOEDITING_kH263_Profile_0_Level_40        = 8,
-    M4VIDEOEDITING_kH263_Profile_0_Level_45        = 9,
-    M4VIDEOEDITING_kMPEG4_SP_Level_4a              = 10,
-    M4VIDEOEDITING_kMPEG4_SP_Level_5               = 11,
-    M4VIDEOEDITING_kH264_Profile_0_Level_1         = 12,
-    M4VIDEOEDITING_kH264_Profile_0_Level_1b        = 13,
-    M4VIDEOEDITING_kH264_Profile_0_Level_1_1       = 14,
-    M4VIDEOEDITING_kH264_Profile_0_Level_1_2       = 15,
-    M4VIDEOEDITING_kH264_Profile_0_Level_1_3       = 16,
-    M4VIDEOEDITING_kH264_Profile_0_Level_2         = 17,
-    M4VIDEOEDITING_kH264_Profile_0_Level_2_1       = 18,
-    M4VIDEOEDITING_kH264_Profile_0_Level_2_2       = 19,
-    M4VIDEOEDITING_kH264_Profile_0_Level_3         = 20,
-    M4VIDEOEDITING_kH264_Profile_0_Level_3_1       = 21,
-    M4VIDEOEDITING_kH264_Profile_0_Level_3_2       = 22,
-    M4VIDEOEDITING_kH264_Profile_0_Level_4         = 23,
-    M4VIDEOEDITING_kH264_Profile_0_Level_4_1       = 24,
-    M4VIDEOEDITING_kH264_Profile_0_Level_4_2       = 25,
-    M4VIDEOEDITING_kH264_Profile_0_Level_5         = 26,
-    M4VIDEOEDITING_kH264_Profile_0_Level_5_1       = 27,
+    /* H.263 Profiles and levels */
+    M4VIDEOEDITING_kH263_Profile_0_Level_10        = 0,
+    M4VIDEOEDITING_kH263_Profile_0_Level_20        = 1,
+    M4VIDEOEDITING_kH263_Profile_0_Level_30        = 2,
+    M4VIDEOEDITING_kH263_Profile_0_Level_40        = 3,
+    M4VIDEOEDITING_kH263_Profile_0_Level_45        = 4,
+    /* MPEG-4 Profiles and levels */
+    M4VIDEOEDITING_kMPEG4_SP_Level_0               = 50,
+    M4VIDEOEDITING_kMPEG4_SP_Level_0b              = 51,
+    M4VIDEOEDITING_kMPEG4_SP_Level_1               = 52,
+    M4VIDEOEDITING_kMPEG4_SP_Level_2               = 53,
+    M4VIDEOEDITING_kMPEG4_SP_Level_3               = 54,
+    M4VIDEOEDITING_kMPEG4_SP_Level_4a              = 55,
+    M4VIDEOEDITING_kMPEG4_SP_Level_5               = 56,
+    /* AVC Profiles and levels */
+    M4VIDEOEDITING_kH264_Profile_0_Level_1         = 150,
+    M4VIDEOEDITING_kH264_Profile_0_Level_1b        = 151,
+    M4VIDEOEDITING_kH264_Profile_0_Level_1_1       = 152,
+    M4VIDEOEDITING_kH264_Profile_0_Level_1_2       = 153,
+    M4VIDEOEDITING_kH264_Profile_0_Level_1_3       = 154,
+    M4VIDEOEDITING_kH264_Profile_0_Level_2         = 155,
+    M4VIDEOEDITING_kH264_Profile_0_Level_2_1       = 156,
+    M4VIDEOEDITING_kH264_Profile_0_Level_2_2       = 157,
+    M4VIDEOEDITING_kH264_Profile_0_Level_3         = 158,
+    M4VIDEOEDITING_kH264_Profile_0_Level_3_1       = 159,
+    M4VIDEOEDITING_kH264_Profile_0_Level_3_2       = 160,
+    M4VIDEOEDITING_kH264_Profile_0_Level_4         = 161,
+    M4VIDEOEDITING_kH264_Profile_0_Level_4_1       = 162,
+    M4VIDEOEDITING_kH264_Profile_0_Level_4_2       = 163,
+    M4VIDEOEDITING_kH264_Profile_0_Level_5         = 164,
+    M4VIDEOEDITING_kH264_Profile_0_Level_5_1       = 165,
+    /* Unsupported profile and level */
     M4VIDEOEDITING_kProfile_and_Level_Out_Of_Range = 255
 } M4VIDEOEDITING_VideoProfileAndLevel;
 
diff --git a/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h b/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
index 069b84d..570b654 100755
--- a/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
+++ b/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
@@ -84,14 +84,21 @@
 
 typedef enum
 {
-    M4VSS3GPP_kEditVideoState_READ_WRITE    = 10,    /**< Doing Read/Write operation
-                                                        (no decoding/encoding) */
-    M4VSS3GPP_kEditVideoState_BEGIN_CUT     = 11,    /**< Decode encode to create an I frame */
-    M4VSS3GPP_kEditVideoState_DECODE_ENCODE = 12,    /**< Doing Read-Decode/Filter/
-                                                        Encode-Write operation */
-    M4VSS3GPP_kEditVideoState_TRANSITION    = 13,    /**< Transition; blending of two videos */
-    M4VSS3GPP_kEditVideoState_AFTER_CUT     = 14    /**< Special Read/Write mode after a
-                                                            begin cut (time frozen) */
+    /**< Doing Read/Write operation. This operation will have no processing
+     * on input frames. Only time stamp manipulations in output file. */
+    M4VSS3GPP_kEditVideoState_READ_WRITE    = 10,
+    /**< Decode encode to create an I frame. This is done for a single frame
+     * to create a new reference frame. */
+    M4VSS3GPP_kEditVideoState_BEGIN_CUT     = 11,
+    /**< Doing Read->Decode->Filter->Encode->Write operation on the input file
+     * to create the output file. */
+    M4VSS3GPP_kEditVideoState_DECODE_ENCODE = 12,
+    /**< Applied when Transition is active and blending of two videos is
+     * required. */
+    M4VSS3GPP_kEditVideoState_TRANSITION    = 13,
+    /**< Special Read/Write mode used after BEGIN_CUT state. The frame
+     * is already coded as I frame in BEGIN_CUT state; so skip it. */
+    M4VSS3GPP_kEditVideoState_AFTER_CUT     = 14
 }
 M4VSS3GPP_EditVideoState;
 
@@ -610,6 +617,8 @@
     M4OSA_UInt8              nbActiveEffects1;  /**< Numbers of active effects RC */
     M4OSA_Bool               m_bClipExternalHasStarted;  /**< Flag to indicate that an
                                                               external effect is active */
+    M4OSA_Int32              iInOutTimeOffset;
+    M4OSA_Bool               bEncodeTillEoF;
 } M4VSS3GPP_InternalEditContext;
 
 
diff --git a/libvideoeditor/vss/mcs/src/M4MCS_API.c b/libvideoeditor/vss/mcs/src/M4MCS_API.c
index d04befa..a327426 100755
--- a/libvideoeditor/vss/mcs/src/M4MCS_API.c
+++ b/libvideoeditor/vss/mcs/src/M4MCS_API.c
@@ -3237,12 +3237,6 @@
         M4OSA_free((M4OSA_MemAddr32)pC->H264MCSTempBuffer);
     }
 
-    if( M4OSA_NULL != pC->m_pInstance )
-    {
-        err = H264MCS_Freeinstance(pC->m_pInstance);
-        pC->m_pInstance = M4OSA_NULL;
-    }
-
     M4OSA_TRACE3_0("M4MCS_close(): returning M4NO_ERROR");
     return err;
 }
@@ -3307,6 +3301,12 @@
         return M4ERR_STATE;
     }
 
+    if( M4OSA_NULL != pC->m_pInstance )
+    {
+        err = H264MCS_Freeinstance(pC->m_pInstance);
+        pC->m_pInstance = M4OSA_NULL;
+    }
+
     /* ----- Free video encoder stuff, if needed ----- */
 
     if( ( M4OSA_NULL != pC->pViEncCtxt)
@@ -9410,7 +9410,7 @@
     * Check for end cut.
     * We must check here if the end cut is reached, because in that case we must
     * call the last encode step (-> bLastFrame set to true) */
-    if( ( pC->dViDecCurrentCts + pC->dCtsIncrement + 0.5) >= (pC->uiEndCutTime
+    if( ( pC->dViDecCurrentCts + pC->dCtsIncrement ) >= (pC->uiEndCutTime
         + M4MCS_ABS(pC->dViDecStartingCts - pC->uiBeginCutTime)) )
     {
         FrameMode =
diff --git a/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c b/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
index bc75488..1d8c1cd 100755
--- a/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
+++ b/libvideoeditor/vss/src/M4VD_EXTERNAL_BitstreamParser.c
@@ -503,18 +503,21 @@
 {
     M4OSA_ERR err = M4NO_ERROR;
     M4OSA_Bool NALSPS_and_Profile0Found = M4OSA_FALSE;
-    M4OSA_UInt16 index;
-    M4OSA_Bool    constraintSet3;
+    M4OSA_UInt16 index = 28; /* the 29th byte is SPS start */
+    M4OSA_Bool constraintSet3;
+
+    if (DSISize <= index) {
+        M4OSA_TRACE1_0("M4DECODER_EXTERNAL_ParseAVCDSI: DSI is invalid");
+        *profile = M4DECODER_AVC_kProfile_and_Level_Out_Of_Range;
+        return M4ERR_PARAMETER;
+    }
 
     /* check for baseline profile */
-    for(index = 0; index < (DSISize-1); index++)
+    if(((pDSI[index] & 0x1f) == 0x07) && (pDSI[index+1] == 0x42))
     {
-        if(((pDSI[index] & 0x1f) == 0x07) && (pDSI[index+1] == 0x42))
-        {
-            NALSPS_and_Profile0Found = M4OSA_TRUE;
-            break;
-        }
+        NALSPS_and_Profile0Found = M4OSA_TRUE;
     }
+
     if(M4OSA_FALSE == NALSPS_and_Profile0Found)
     {
         M4OSA_TRACE1_1("M4DECODER_EXTERNAL_ParseAVCDSI: index bad = %d", index);
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_Clip.c b/libvideoeditor/vss/src/M4VSS3GPP_Clip.c
index 0a3b737..b6408b3 100755
--- a/libvideoeditor/vss/src/M4VSS3GPP_Clip.c
+++ b/libvideoeditor/vss/src/M4VSS3GPP_Clip.c
@@ -686,7 +686,7 @@
         pClipCtxt->pSettings->uiEndCutTime = (M4OSA_UInt32)iDuration;
     }
 
-    pClipCtxt->iEndTime = pClipCtxt->pSettings->uiEndCutTime;
+    pClipCtxt->iEndTime = (M4OSA_Int32)pClipCtxt->pSettings->uiEndCutTime;
 
     /**
     * Return with no error */
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_Edit.c b/libvideoeditor/vss/src/M4VSS3GPP_Edit.c
index d495186..8ae0f15 100755
--- a/libvideoeditor/vss/src/M4VSS3GPP_Edit.c
+++ b/libvideoeditor/vss/src/M4VSS3GPP_Edit.c
@@ -229,6 +229,9 @@
 
     pC->bIsMMS = M4OSA_FALSE;
 
+    pC->iInOutTimeOffset = 0;
+    pC->bEncodeTillEoF = M4OSA_FALSE;
+
     /**
     * Return with no error */
     M4OSA_TRACE3_0("M4VSS3GPP_editInit(): returning M4NO_ERROR");
@@ -3289,8 +3292,9 @@
             }
         }
     }
-    /* The flag is set to false at the beginning of every clip */
+    /* The flags are set to false at the beginning of every clip */
     pC->m_bClipExternalHasStarted = M4OSA_FALSE;
+    pC->bEncodeTillEoF = M4OSA_FALSE;
 
     /**
     * Return with no error */
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
index 9870426..fdc919a 100755
--- a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
+++ b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c
@@ -93,12 +93,18 @@
     M4OSA_UInt16 offset;
 
     /**
-    * Check if we reached end cut */
-    // Decorrelate input and output encoding timestamp to handle encoder prefetch
-    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset) >= pC->pC1->iEndTime )
+     * Check if we reached end cut. Decorrelate input and output encoding
+     * timestamp to handle encoder prefetch
+     */
+    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
+        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
     {
         /* Re-adjust video to precise cut time */
-        // Decorrelate input and output encoding timestamp to handle encoder prefetch
+        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
+            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
+        if ( pC->iInOutTimeOffset < 0 ) {
+            pC->iInOutTimeOffset = 0;
+        }
 
         /**
         * Video is done for this clip */
@@ -723,11 +729,15 @@
         to catch all P-frames after the cut) */
         else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
         {
-            if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
-                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
+            if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
+                pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
+                pC->bEncodeTillEoF = M4OSA_TRUE;
+            } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
+                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
                 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
-            else
+            } else {
                 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
+            }
         }
         /* Else we are in default copy/paste mode */
         else
@@ -771,7 +781,8 @@
                 }
             }
             else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
-                    (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)))
+                    (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
+                    pC->bEncodeTillEoF == M4OSA_FALSE)
             {
                 /**
                  * Test if we go into copy/paste mode or into decode/encode mode
@@ -829,7 +840,7 @@
         || (M4VSS3GPP_kEditVideoState_TRANSITION
         == previousVstate)) /**< encode mode */
         && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */
-        )
+        && (pC->bEncodeTillEoF == M4OSA_FALSE) )
     {
         pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
     }
@@ -1974,9 +1985,11 @@
 
              if(uiClipNumber ==1)
              {
-                if ((t >= (M4OSA_Int32)(pFx->uiStartTime)) &&                  /**< Are we after the start time of the effect? */
-                    (t <  (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) /**< Are we into the effect duration? */
-                {
+                /**< Are we after the start time of the effect?
+                 * or Are we into the effect duration?
+                 */
+                if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
+                    (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
                     /**
                      * Set the active effect(s) */
                     pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;
@@ -1993,17 +2006,19 @@
                     }
 
                     /**
-                     * The third effect has the highest priority, then the second one, then the first one.
-                     * Hence, as soon as we found an active effect, we can get out of this loop */
-
+                     * The third effect has the highest priority, then the
+                     * second one, then the first one. Hence, as soon as we
+                     * found an active effect, we can get out of this loop.
+                     */
                 }
             }
             else
             {
-                if ((t + pC->pTransitionList[uiClipIndex].uiTransitionDuration >=
-                   (M4OSA_Int32)(pFx->uiStartTime)) && (t + pC->pTransitionList[uiClipIndex].uiTransitionDuration
-                    <  (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) /**< Are we into the effect duration? */
-                 {
+                /**< Are we into the effect duration? */
+                if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
+                    >= (M4OSA_Int32)(pFx->uiStartTime))
+                    && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
+                    <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
                     /**
                      * Set the active effect(s) */
                     pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c b/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c
index f7226a3..df14a54 100755
--- a/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c
+++ b/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c
@@ -305,7 +305,7 @@
     err = M4VSS3GPP_registerAudioDecoder( pContext, audioDecoderType, pAudioDecoderInterface);
     M4OSA_DEBUG_IF1((err != M4NO_ERROR), err,
         "M4VSS3GPP_subscribeMediaAndCodec: can't register MP3 decoder");
-#endif  /* M4VSS_SUPPORT_AUDEC_MP3 */`
+#endif  /* M4VSS_SUPPORT_AUDEC_MP3 */
 
 
     /* --- NULL --- */
diff --git a/libvideoeditor/vss/src/M4xVSS_API.c b/libvideoeditor/vss/src/M4xVSS_API.c
index 42c24ed..84fb0cd 100755
--- a/libvideoeditor/vss/src/M4xVSS_API.c
+++ b/libvideoeditor/vss/src/M4xVSS_API.c
@@ -2459,22 +2459,14 @@
                             <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS.
                             PanZoomTopleftXa > 1000
                             || xVSS_context->pSettings->pClipList[i]->xVSS.
-                            PanZoomTopleftXa < 0
-                            || xVSS_context->pSettings->pClipList[i]->xVSS.
                             PanZoomTopleftYa > 1000
-                            || xVSS_context->pSettings->pClipList[i]->xVSS.
-                            PanZoomTopleftYa < 0
                             || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb
                             > 1000
                             || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb
                             <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS.
                             PanZoomTopleftXb > 1000
                             || xVSS_context->pSettings->pClipList[i]->xVSS.
-                            PanZoomTopleftXb < 0
-                            || xVSS_context->pSettings->pClipList[i]->xVSS.
-                            PanZoomTopleftYb > 1000
-                            || xVSS_context->pSettings->pClipList[i]->xVSS.
-                            PanZoomTopleftYb < 0 )
+                            PanZoomTopleftYb > 1000)
                         {
                             M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand");
                             M4xVSS_freeCommand(xVSS_context);
@@ -3689,32 +3681,27 @@
 
             /*Alpha blending*/
             /*Check if the alpha blending parameters are corrects*/
-            if( pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime < 0
-                || pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime > 100 )
+            if( pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime > 100 )
             {
                 pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime = 0;
             }
 
-            if( pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime < 0
-                || pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime > 100 )
+            if( pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime > 100 )
             {
                 pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime = 0;
             }
 
-            if( pSettings->Effects[j].xVSS.uialphaBlendingEnd < 0
-                || pSettings->Effects[j].xVSS.uialphaBlendingEnd > 100 )
+            if( pSettings->Effects[j].xVSS.uialphaBlendingEnd > 100 )
             {
                 pSettings->Effects[j].xVSS.uialphaBlendingEnd = 100;
             }
 
-            if( pSettings->Effects[j].xVSS.uialphaBlendingMiddle < 0
-                || pSettings->Effects[j].xVSS.uialphaBlendingMiddle > 100 )
+            if( pSettings->Effects[j].xVSS.uialphaBlendingMiddle > 100 )
             {
                 pSettings->Effects[j].xVSS.uialphaBlendingMiddle = 100;
             }
 
-            if( pSettings->Effects[j].xVSS.uialphaBlendingStart < 0
-                || pSettings->Effects[j].xVSS.uialphaBlendingStart > 100 )
+            if( pSettings->Effects[j].xVSS.uialphaBlendingStart > 100 )
             {
                 pSettings->Effects[j].xVSS.uialphaBlendingStart = 100;
             }
@@ -4088,34 +4075,27 @@
 
                 /*Alpha blending*/
                 /*Check if the alpha blending parameters are corrects*/
-                if( pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime < 0
-                    || pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime
-                > 100 )
+                if( pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime > 100 )
                 {
                     pSettings->Effects[j].xVSS.uialphaBlendingFadeInTime = 0;
                 }
 
-                if( pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime < 0
-                    || pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime
-                > 100 )
+                if( pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime > 100 )
                 {
                     pSettings->Effects[j].xVSS.uialphaBlendingFadeOutTime = 0;
                 }
 
-                if( pSettings->Effects[j].xVSS.uialphaBlendingEnd < 0
-                    || pSettings->Effects[j].xVSS.uialphaBlendingEnd > 100 )
+                if( pSettings->Effects[j].xVSS.uialphaBlendingEnd > 100 )
                 {
                     pSettings->Effects[j].xVSS.uialphaBlendingEnd = 100;
                 }
 
-                if( pSettings->Effects[j].xVSS.uialphaBlendingMiddle < 0
-                    || pSettings->Effects[j].xVSS.uialphaBlendingMiddle > 100 )
+                if( pSettings->Effects[j].xVSS.uialphaBlendingMiddle > 100 )
                 {
                     pSettings->Effects[j].xVSS.uialphaBlendingMiddle = 100;
                 }
 
-                if( pSettings->Effects[j].xVSS.uialphaBlendingStart < 0
-                    || pSettings->Effects[j].xVSS.uialphaBlendingStart > 100 )
+                if( pSettings->Effects[j].xVSS.uialphaBlendingStart > 100 )
                 {
                     pSettings->Effects[j].xVSS.uialphaBlendingStart = 100;
                 }
diff --git a/libvideoeditor/vss/src/M4xVSS_internal.c b/libvideoeditor/vss/src/M4xVSS_internal.c
index 71a8929..f32257a 100755
--- a/libvideoeditor/vss/src/M4xVSS_internal.c
+++ b/libvideoeditor/vss/src/M4xVSS_internal.c
@@ -2157,7 +2157,7 @@
         {
             M4OSA_TRACE1_0("Allocation error in \
                 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
-            M4OSA_free((M4OSA_MemAddr8)rgbPlane.pac_data);
+            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
             return M4ERR_ALLOC;
         }
 
@@ -2176,7 +2176,7 @@
 
             input_pac_data += ((rgbPlane.u_width-1)*2);
         }
-        M4OSA_free((M4OSA_MemAddr8)rgbPlane.pac_data);
+        M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
         rgbPlane.pac_data = newRGBpac_data;
     }
 
@@ -2417,7 +2417,6 @@
     {
         M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
     }
-
     M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
     return err;
 }
@@ -3271,7 +3270,6 @@
                 {
                     if(framingCtx->aFramingCtx != M4OSA_NULL)
                     {
-                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                         {
                             if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
                             {
@@ -3301,17 +3299,14 @@
                     }
                     if(framingCtx->aFramingCtx_last != M4OSA_NULL)
                     {
-                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
+                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
                         {
-                            if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
-                            {
-                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
-                                    FramingRgb->pac_data);
-                                framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
-                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
-                                    FramingRgb);
-                                framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
-                            }
+                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
+                                FramingRgb->pac_data);
+                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
+                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
+                                FramingRgb);
+                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
                         }
                         if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
                         {
@@ -3358,15 +3353,12 @@
                     if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non
                     existant pointer */
                     {
-                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
+                        if(framingCtx->FramingRgb != M4OSA_NULL)
                         {
-                            if(framingCtx->FramingRgb != M4OSA_NULL)
-                            {
-                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
-                                framingCtx->FramingRgb->pac_data = M4OSA_NULL;
-                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
-                                framingCtx->FramingRgb = M4OSA_NULL;
-                            }
+                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
+                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
+                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
+                            framingCtx->FramingRgb = M4OSA_NULL;
                         }
                         if(framingCtx->FramingYuv != M4OSA_NULL)
                         {
@@ -4132,14 +4124,18 @@
 
                 if(alphaBlendingStruct != M4OSA_NULL)
                 {
-                    if(pProgress->uiProgress >= 0 && pProgress->uiProgress \
+                    if(pProgress->uiProgress \
                     < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
                     {
-                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
-                         - alphaBlendingStruct->m_start)\
-                            *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
-                        alphaBlending += alphaBlendingStruct->m_start;
-                        alphaBlending /= 100;
+                        if(alphaBlendingStruct->m_fadeInTime == 0) {
+                            alphaBlending = alphaBlendingStruct->m_start / 100;
+                        } else {
+                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
+                             - alphaBlendingStruct->m_start)\
+                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
+                            alphaBlending += alphaBlendingStruct->m_start;
+                            alphaBlending /= 100;
+                        }
                     }
                     else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
                     m_fadeInTime*10) && pProgress->uiProgress < 1000\
@@ -4151,11 +4147,15 @@
                     else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
                     (alphaBlendingStruct->m_fadeOutTime*10))
                     {
-                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
-                        - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
-                        /(alphaBlendingStruct->m_fadeOutTime*10);
-                        alphaBlending += alphaBlendingStruct->m_end;
-                        alphaBlending /= 100;
+                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
+                            alphaBlending = alphaBlendingStruct->m_end / 100;
+                        } else {
+                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
+                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
+                            /(alphaBlendingStruct->m_fadeOutTime*10);
+                            alphaBlending += alphaBlendingStruct->m_end;
+                            alphaBlending /= 100;
+                        }
                     }
                 }
                 /**/
diff --git a/libvideoeditor/vss/src/VideoEditorResampler.cpp b/libvideoeditor/vss/src/VideoEditorResampler.cpp
index c4d1a8a..81709df 100755
--- a/libvideoeditor/vss/src/VideoEditorResampler.cpp
+++ b/libvideoeditor/vss/src/VideoEditorResampler.cpp
@@ -48,6 +48,7 @@
     M4OSA_Int32 outSamplingRate;
     M4OSA_Int32 inSamplingRate;
 
+    int16_t *mTmpInBuffer;
 };
 
 #define MAX_SAMPLEDURATION_FOR_CONVERTION 40 //ms
@@ -55,9 +56,9 @@
 status_t VideoEditorResampler::getNextBuffer(AudioBufferProvider::Buffer *pBuffer) {
 
     uint32_t dataSize = pBuffer->frameCount * this->nbChannels * sizeof(int16_t);
-    int16_t *pTmpInBuffer = (int16_t*)malloc(dataSize);
-    memcpy(pTmpInBuffer, this->mInput, dataSize);
-    pBuffer->raw = (void*)pTmpInBuffer;
+    mTmpInBuffer = (int16_t*)malloc(dataSize);
+    memcpy(mTmpInBuffer, this->mInput, dataSize);
+    pBuffer->raw = (void*)mTmpInBuffer;
 
     return OK;
 }
@@ -67,6 +68,7 @@
     if(pBuffer->raw != NULL) {
         free(pBuffer->raw);
         pBuffer->raw = NULL;
+        mTmpInBuffer = NULL;
     }
     pBuffer->frameCount = 0;
 }
@@ -87,6 +89,7 @@
     context->nbChannels = inChannelCount;
     context->outSamplingRate = sampleRate;
     context->mInput = NULL;
+    context->mTmpInBuffer = NULL;
 
     return ((M4OSA_Int32)context);
 }
@@ -119,6 +122,11 @@
     VideoEditorResampler *context =
        (VideoEditorResampler *)resamplerContext;
 
+    if (context->mTmpInBuffer != NULL) {
+        free(context->mTmpInBuffer);
+        context->mTmpInBuffer = NULL;
+    }
+
     if (context->mInput != NULL) {
         free(context->mInput);
         context->mInput = NULL;
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
index eb48717..92116f6 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditor3gpReader.cpp
@@ -670,7 +670,6 @@
     break;
     case M4READER_3GP_kOptionID_H263Properties:
         {
-#if 0
             if(M4OSA_NULL == pC->mVideoStreamHandler) {
                 LOGV("VideoEditor3gpReader_getOption no videoStream retrieved");
 
@@ -678,7 +677,7 @@
                 break;
             }
             if((M4DA_StreamTypeVideoH263 != pC->mVideoStreamHandler->\
-                mStreamType) || (pC->mVideoStreamHandler->\
+                m_streamType) || (pC->mVideoStreamHandler->\
                 m_decoderSpecificInfoSize < 7)) {
                 LOGV("VideoEditor3gpReader_getOption DSI Size %d",
                     pC->mVideoStreamHandler->m_decoderSpecificInfoSize);
@@ -693,7 +692,6 @@
                 pC->mVideoStreamHandler->m_pDecoderSpecificInfo[6];
             ((M4READER_3GP_H263Properties *)pValue)->uiLevel =
                 pC->mVideoStreamHandler->m_pDecoderSpecificInfo[5];
-#endif
             LOGV("VideoEditor3gpReader_getOption M4READER_3GP_kOptionID_\
             H263Properties end");
         }
@@ -1520,14 +1518,8 @@
 
                 /* Get the DSI info */
                 if(M4DA_StreamTypeVideoH263 == streamType) {
-                    if (meta->findData(kKeyESDS, &type, &data, &size)) {
-                        ESDS esds((const char *)data, size);
-                        CHECK_EQ(esds.InitCheck(), OK);
-
-                        esds.getCodecSpecificInfo(
-                            &codec_specific_data, &codec_specific_data_size);
-                        (*pStreamHandler)->m_decoderSpecificInfoSize =
-                            codec_specific_data_size;
+                    if (meta->findData(kKeyD263, &type, &data, &size)) {
+                        (*pStreamHandler)->m_decoderSpecificInfoSize = size;
                         if ((*pStreamHandler)->m_decoderSpecificInfoSize != 0) {
                             DecoderSpecific = (M4OSA_UInt8*)M4OSA_malloc(
                                 (*pStreamHandler)->m_decoderSpecificInfoSize,
@@ -1536,15 +1528,19 @@
                                 return M4ERR_ALLOC;
                             }
                             M4OSA_memcpy((M4OSA_MemAddr8)DecoderSpecific,
-                                (M4OSA_MemAddr8)codec_specific_data,
-                                codec_specific_data_size);
+                                (M4OSA_MemAddr8)data, size);
                             (*pStreamHandler)->m_pDecoderSpecificInfo =
                                 DecoderSpecific;
                         }
                         else {
                             (*pStreamHandler)->m_pDecoderSpecificInfo =
                                 M4OSA_NULL;
+                            (*pStreamHandler)->m_decoderSpecificInfoSize = 0;
                         }
+                        (*pStreamHandler)->m_pESDSInfo = M4OSA_NULL;
+                        (*pStreamHandler)->m_ESDSInfoSize = 0;
+                        (*pStreamHandler)->m_pH264DecoderSpecificInfo = M4OSA_NULL;
+                        (*pStreamHandler)->m_H264decoderSpecificInfoSize = 0;
                     } else {
                         LOGV("VE_getNextStreamHandler: H263 dsi not found");
                         (*pStreamHandler)->m_pDecoderSpecificInfo = M4OSA_NULL;
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c b/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
index 9f50a58..e4de140 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorBuffer.c
@@ -208,7 +208,7 @@
 
     /**
      * Initialize all the buffers in the pool */
-    for(index = 0; index< pool->NB; index++)
+    for(index = 0; index < pool->NB; index++)
     {
         pool->pNXPBuffer[index].pData = M4OSA_NULL;
         pool->pNXPBuffer[index].pData = (M4OSA_Void*)M4OSA_malloc(
@@ -218,10 +218,10 @@
         {
             for (j = 0; j < index; j++)
             {
-                if(M4OSA_NULL != pool->pNXPBuffer[index].pData)
+                if(M4OSA_NULL != pool->pNXPBuffer[j].pData)
                 {
-                    M4OSA_free((M4OSA_MemAddr32)pool->pNXPBuffer[index].pData);
-                    pool->pNXPBuffer[index].pData = M4OSA_NULL;
+                    M4OSA_free((M4OSA_MemAddr32)pool->pNXPBuffer[j].pData);
+                    pool->pNXPBuffer[j].pData = M4OSA_NULL;
                 }
             }
             err = M4ERR_ALLOC;
diff --git a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
index a8b9c0c..5f6b90c 100755
--- a/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
+++ b/libvideoeditor/vss/stagefrightshells/src/VideoEditorVideoDecoder.cpp
@@ -745,9 +745,9 @@
         cropRight = vWidth - 1;
         cropBottom = vHeight - 1;
 
-        LOGI("got dimensions only %d x %d", width, height);
+        LOGV("got dimensions only %d x %d", width, height);
     } else {
-        LOGI("got crop rect %d, %d, %d, %d",
+        LOGV("got crop rect %d, %d, %d, %d",
              cropLeft, cropTop, cropRight, cropBottom);
     }
 
diff --git a/libvideoeditor/vss/video_filters/src/M4VIFI_ResizeRGB888toRGB888.c b/libvideoeditor/vss/video_filters/src/M4VIFI_ResizeRGB888toRGB888.c
index a21e1d0..ebd8b19 100755
--- a/libvideoeditor/vss/video_filters/src/M4VIFI_ResizeRGB888toRGB888.c
+++ b/libvideoeditor/vss/video_filters/src/M4VIFI_ResizeRGB888toRGB888.c
@@ -78,13 +78,13 @@
     M4VIFI_UInt32    i32_b03, i32_g03, i32_r03;
 
     /* Check for the YUV width and height are even */
-    if( (IS_EVEN(pPlaneIn->u_height) == FALSE)    ||
+    if ((IS_EVEN(pPlaneIn->u_height) == FALSE)    ||
         (IS_EVEN(pPlaneOut->u_height) == FALSE))
     {
         return M4VIFI_ILLEGAL_FRAME_HEIGHT;
     }
 
-    if( (IS_EVEN(pPlaneIn->u_width) == FALSE) ||
+    if ((IS_EVEN(pPlaneIn->u_width) == FALSE) ||
         (IS_EVEN(pPlaneOut->u_width) == FALSE))
     {
         return M4VIFI_ILLEGAL_FRAME_WIDTH;
@@ -197,12 +197,47 @@
                 pu8_src_bottom = pu8_src_top + (u32_stride_in);
                 u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */
 
-                /* Weighted combination */
-                GET_RGB24(i32_b00,i32_g00,i32_r00,pu8_src_top,0);
-                GET_RGB24(i32_b01,i32_g01,i32_r01,pu8_src_top,3);
-                GET_RGB24(i32_b02,i32_g02,i32_r02,pu8_src_bottom,0);
-                GET_RGB24(i32_b03,i32_g03,i32_r03,pu8_src_bottom,3);
-
+                if ((u32_width == 1) && (u32_width_in == u32_width_out)) {
+                    /*
+                       When input height is equal to output height and input width
+                       equal to output width, replicate the corner pixels for
+                       interpolation
+                    */
+                    if ((u32_height == 1) && (u32_height_in == u32_height_out)) {
+                        GET_RGB24(i32_b00,i32_g00,i32_r00,pu8_src_top,0);
+                        GET_RGB24(i32_b01,i32_g01,i32_r01,pu8_src_top,0);
+                        GET_RGB24(i32_b02,i32_g02,i32_r02,pu8_src_top,0);
+                        GET_RGB24(i32_b03,i32_g03,i32_r03,pu8_src_top,0);
+                    }
+                    /*
+                       When input height is not equal to output height and
+                       input width equal to output width, replicate the
+                       column for interpolation
+                    */
+                    else {
+                        GET_RGB24(i32_b00,i32_g00,i32_r00,pu8_src_top,0);
+                        GET_RGB24(i32_b01,i32_g01,i32_r01,pu8_src_top,0);
+                        GET_RGB24(i32_b02,i32_g02,i32_r02,pu8_src_bottom,0);
+                        GET_RGB24(i32_b03,i32_g03,i32_r03,pu8_src_bottom,0);
+                    }
+                } else {
+                    /*
+                       When input height is equal to output height and
+                       input width not equal to output width, replicate the
+                       row for interpolation
+                    */
+                    if ((u32_height == 1) && (u32_height_in == u32_height_out)) {
+                        GET_RGB24(i32_b00,i32_g00,i32_r00,pu8_src_top,0);
+                        GET_RGB24(i32_b01,i32_g01,i32_r01,pu8_src_top,3);
+                        GET_RGB24(i32_b02,i32_g02,i32_r02,pu8_src_top,0);
+                        GET_RGB24(i32_b03,i32_g03,i32_r03,pu8_src_top,3);
+                    } else {
+                        GET_RGB24(i32_b00,i32_g00,i32_r00,pu8_src_top,0);
+                        GET_RGB24(i32_b01,i32_g01,i32_r01,pu8_src_top,3);
+                        GET_RGB24(i32_b02,i32_g02,i32_r02,pu8_src_bottom,0);
+                        GET_RGB24(i32_b03,i32_g03,i32_r03,pu8_src_bottom,3);
+                    }
+                }
                 u32_Rtemp_value = (M4VIFI_UInt8)(((i32_r00*(16-u32_x_frac) +
                                  i32_r01*u32_x_frac)*(16-u32_y_frac) +
                                 (i32_r02*(16-u32_x_frac) +