am d4b22ab4: status_t != bool

* commit 'd4b22ab4889f9b1885bfc0dc45667c846a171a98':
  status_t != bool
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 528d197..7cb8f62 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -30,7 +30,6 @@
 #include <binder/ProcessState.h>
 #include <media/IMediaPlayerService.h>
 #include <media/stagefright/foundation/ALooper.h>
-#include "include/ARTSPController.h"
 #include "include/LiveSession.h"
 #include "include/NuCachedSource2.h"
 #include <media/stagefright/AudioPlayer.h>
@@ -636,7 +635,6 @@
     gDisplayHistogram = false;
 
     sp<ALooper> looper;
-    sp<ARTSPController> rtspController;
     sp<LiveSession> liveSession;
 
     int res;
@@ -948,7 +946,6 @@
         sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
 
         if (strncasecmp(filename, "sine:", 5)
-                && strncasecmp(filename, "rtsp://", 7)
                 && strncasecmp(filename, "httplive://", 11)
                 && dataSource == NULL) {
             fprintf(stderr, "Unable to create data source.\n");
@@ -984,23 +981,7 @@
         } else {
             sp<MediaExtractor> extractor;
 
-            if (!strncasecmp("rtsp://", filename, 7)) {
-                if (looper == NULL) {
-                    looper = new ALooper;
-                    looper->start();
-                }
-
-                rtspController = new ARTSPController(looper);
-                status_t err = rtspController->connect(filename);
-                if (err != OK) {
-                    fprintf(stderr, "could not connect to rtsp server.\n");
-                    return -1;
-                }
-
-                extractor = rtspController.get();
-
-                syncInfoPresent = false;
-            } else if (!strncasecmp("httplive://", filename, 11)) {
+            if (!strncasecmp("httplive://", filename, 11)) {
                 String8 uri("http://");
                 uri.append(filename + 11);
 
@@ -1021,6 +1002,7 @@
                 syncInfoPresent = false;
             } else {
                 extractor = MediaExtractor::Create(dataSource);
+
                 if (extractor == NULL) {
                     fprintf(stderr, "could not create extractor.\n");
                     return -1;
@@ -1116,13 +1098,6 @@
         } else {
             playSource(&client, mediaSource);
         }
-
-        if (rtspController != NULL) {
-            rtspController->disconnect();
-            rtspController.clear();
-
-            sleep(3);
-        }
     }
 
     if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index 2378345..bd430d1 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -360,7 +360,7 @@
         service->create(getpid(), client, 0);
 
     if (player != NULL && player->setDataSource(source) == NO_ERROR) {
-        player->setVideoSurface(surface);
+        player->setVideoSurfaceTexture(surface->getSurfaceTexture());
         player->start();
 
         client->waitForEOS();
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index 0e2cdf7..e905903 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -40,7 +40,6 @@
                                     const KeyedVector<String8, String8>* headers) = 0;
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
-    virtual status_t        setVideoSurface(const sp<Surface>& surface) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<ISurfaceTexture>& surfaceTexture) = 0;
     virtual status_t        prepareAsync() = 0;
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 4328d3c..80f43a3 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -117,9 +117,6 @@
         return INVALID_OPERATION;
     }
 
-    // pass the buffered Surface to the media player service
-    virtual status_t    setVideoSurface(const sp<Surface>& surface) = 0;
-
     // pass the buffered ISurfaceTexture to the media player service
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<ISurfaceTexture>& surfaceTexture) = 0;
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index eab7648..250f267 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -48,15 +48,21 @@
 };
 
 /**
- *Set CIF as default maximum import and export resolution of video editor.
- *The maximum import and export resolutions are platform specific,
- *which should be defined in media_profiles.xml.
+ * Set CIF as the default maximum import and export resolution of the video
+ * editor. The maximum import and export resolutions are platform specific
+ * and should be defined in media_profiles.xml.
+ * Set the default maximum number of prefetched YUV frames to 6, which means
+ * the video editor can queue up to 6 YUV frames in the video encoder source.
+ * This value limits the amount of memory used by the video editor engine
+ * when the encoder consumes YUV frames more slowly than the video editor
+ * engine produces them.
  */
 enum videoeditor_capability {
     VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH = 352,
     VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT = 288,
     VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH = 352,
     VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT = 288,
+    VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES = 6
 };
 
 enum video_decoder {
@@ -138,6 +144,8 @@
      * videoeditor.input.height.max - max input video frame height
      * videoeditor.output.width.max - max output video frame width
      * videoeditor.output.height.max - max output video frame height
+     * maxPrefetchYUVFrames - maximum number of prefetched YUV frames in the video editor
+     * engine. This value is used to limit memory consumption.
      */
     int getVideoEditorCapParamByName(const char *name) const;
 
@@ -357,11 +365,12 @@
     };
     struct VideoEditorCap {
         VideoEditorCap(int inFrameWidth, int inFrameHeight,
-            int outFrameWidth, int outFrameHeight)
+            int outFrameWidth, int outFrameHeight, int frames)
             : mMaxInputFrameWidth(inFrameWidth),
               mMaxInputFrameHeight(inFrameHeight),
               mMaxOutputFrameWidth(outFrameWidth),
-              mMaxOutputFrameHeight(outFrameHeight) {}
+              mMaxOutputFrameHeight(outFrameHeight),
+              mMaxPrefetchYUVFrames(frames) {}
 
         ~VideoEditorCap() {}
 
@@ -369,6 +378,7 @@
         int mMaxInputFrameHeight;
         int mMaxOutputFrameWidth;
         int mMaxOutputFrameHeight;
+        int mMaxPrefetchYUVFrames;
     };
 
     int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
diff --git a/include/media/mediametadataretriever.h b/include/media/mediametadataretriever.h
index 9aa6700..534afce 100644
--- a/include/media/mediametadataretriever.h
+++ b/include/media/mediametadataretriever.h
@@ -54,6 +54,7 @@
     METADATA_KEY_BITRATE         = 20,
     METADATA_KEY_TIMED_TEXT_LANGUAGES      = 21,
     METADATA_KEY_IS_DRM          = 22,
+    METADATA_KEY_LOCATION        = 23,
 
     // Add more here...
 };
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index 08835fb..e6a0cc5 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -170,7 +170,6 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IStreamSource> &source);
-            status_t        setVideoSurface(const sp<Surface>& surface);
             status_t        setVideoSurfaceTexture(
                                     const sp<ISurfaceTexture>& surfaceTexture);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 3e48459..2eb259e 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -31,7 +31,9 @@
 
 extern const char *MEDIA_MIMETYPE_AUDIO_AMR_NB;
 extern const char *MEDIA_MIMETYPE_AUDIO_AMR_WB;
-extern const char *MEDIA_MIMETYPE_AUDIO_MPEG;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG;           // layer III
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I;
+extern const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II;
 extern const char *MEDIA_MIMETYPE_AUDIO_AAC;
 extern const char *MEDIA_MIMETYPE_AUDIO_QCELP;
 extern const char *MEDIA_MIMETYPE_AUDIO_VORBIS;
@@ -47,6 +49,7 @@
 extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA;
 extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS;
 extern const char *MEDIA_MIMETYPE_CONTAINER_AVI;
+extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS;
 
 extern const char *MEDIA_MIMETYPE_CONTAINER_WVM;
 
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index 57f678c..4cdee17 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -85,6 +85,7 @@
     kKeyDate              = 'date',  // cstring
     kKeyWriter            = 'writ',  // cstring
     kKeyCompilation       = 'cpil',  // cstring
+    kKeyLocation          = 'loc ',  // cstring
     kKeyTimeScale         = 'tmsl',  // int32_t
 
     // video profile and level
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index efa1c45..5a1e93a 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -49,6 +49,16 @@
         }\
     }
 
+
+static inline int16_t clamp16(int32_t sample)
+{
+    // check overflow for both positive and negative values:
+    // all bits above the short range must be equal to the sign bit
+    if ((sample>>15) ^ (sample>>31))
+        sample = 0x7FFF ^ (sample>>31);
+    return sample;
+}
+
 // Namespaces
 namespace android {
 namespace {
@@ -707,13 +717,6 @@
 }   /* end LvmBundle_init */
 
 
-static inline int16_t clamp16(int32_t sample)
-{
-    if ((sample>>15) ^ (sample>>31))
-        sample = 0x7FFF ^ (sample>>31);
-    return sample;
-}
-
 //----------------------------------------------------------------------------
 // LvmBundle_process()
 //----------------------------------------------------------------------------
@@ -2459,6 +2462,9 @@
     LOGV("\tEffect_setEnabled() type %d, enabled %d", pContext->EffectType, enabled);
 
     if (enabled) {
+        // Bass boost or Virtualizer can be temporarily disabled when playing over the device
+        // speaker, as these effects are not applied on that output (see EFFECT_CMD_SET_DEVICE).
+        bool tempDisabled = false;
         switch (pContext->EffectType) {
             case LVM_BASS_BOOST:
                 if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
@@ -2471,6 +2477,7 @@
                 pContext->pBundledContext->SamplesToExitCountBb =
                      (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
                 pContext->pBundledContext->bBassEnabled = LVM_TRUE;
+                tempDisabled = pContext->pBundledContext->bBassTempDisabled;
                 break;
             case LVM_EQUALIZER:
                 if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
@@ -2495,6 +2502,7 @@
                 pContext->pBundledContext->SamplesToExitCountVirt =
                      (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1);
                 pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE;
+                tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled;
                 break;
             case LVM_VOLUME:
                 if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE) {
@@ -2508,7 +2516,9 @@
                 LOGV("\tEffect_setEnabled() invalid effect type");
                 return -EINVAL;
         }
-        LvmEffect_enable(pContext);
+        if (!tempDisabled) {
+            LvmEffect_enable(pContext);
+        }
     } else {
         switch (pContext->EffectType) {
             case LVM_BASS_BOOST:
@@ -2683,12 +2693,19 @@
             LOGV("\tLVM_ERROR : LvmBundle_process returned error %d", lvmStatus);
             return lvmStatus;
         }
-    }else{
+    } else {
         //LOGV("\tEffect_process Not Calling process with %d effects enabled, %d called: Effect %d",
         //pContext->pBundledContext->NumberEffectsEnabled,
         //pContext->pBundledContext->NumberEffectsCalled, pContext->EffectType);
         // 2 is for stereo input
-        memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount*sizeof(LVM_INT16)*2);
+        if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+            for (size_t i=0; i < outBuffer->frameCount*2; i++){
+                outBuffer->s16[i] =
+                        clamp16((LVM_INT32)outBuffer->s16[i] + (LVM_INT32)inBuffer->s16[i]);
+            }
+        } else {
+            memcpy(outBuffer->raw, inBuffer->raw, outBuffer->frameCount*sizeof(LVM_INT16)*2);
+        }
     }
 
     return status;
@@ -3047,9 +3064,10 @@
             LOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
             uint32_t device = *(uint32_t *)pCmdData;
 
-            if(pContext->EffectType == LVM_BASS_BOOST){
-                if((device == AUDIO_DEVICE_OUT_SPEAKER)||(device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
-                   (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+            if (pContext->EffectType == LVM_BASS_BOOST) {
+                if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
+                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) ||
+                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
                     LOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d",
                           *(int32_t *)pCmdData);
                     LOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST");
@@ -3058,30 +3076,31 @@
                     // the effect must still report its original state as this can only be changed
                     // by the ENABLE/DISABLE command
 
-                    if(pContext->pBundledContext->bBassEnabled == LVM_TRUE){
+                    if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         LOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d",
                              *(int32_t *)pCmdData);
                         android::LvmEffect_disable(pContext);
-                        pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
                     }
-                }else{
+                    pContext->pBundledContext->bBassTempDisabled = LVM_TRUE;
+                } else {
                     LOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d",
                          *(int32_t *)pCmdData);
 
                     // If a device supports bassboost and the effect has been temporarily disabled
                     // previously then re-enable it
 
-                    if(pContext->pBundledContext->bBassTempDisabled == LVM_TRUE){
+                    if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
                         LOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d",
                              *(int32_t *)pCmdData);
                         android::LvmEffect_enable(pContext);
-                        pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
                     }
+                    pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
                 }
             }
-            if(pContext->EffectType == LVM_VIRTUALIZER){
-                if((device == AUDIO_DEVICE_OUT_SPEAKER)||(device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
-                   (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
+            if (pContext->EffectType == LVM_VIRTUALIZER) {
+                if((device == AUDIO_DEVICE_OUT_SPEAKER)||
+                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
+                        (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
                     LOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
                           *(int32_t *)pCmdData);
                     LOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
@@ -3090,25 +3109,25 @@
                     // the effect must still report its original state as this can only be changed
                     // by the ENABLE/DISABLE command
 
-                    if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+                    if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
                         LOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
                               *(int32_t *)pCmdData);
                         android::LvmEffect_disable(pContext);
-                        pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
                     }
-                }else{
+                    pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
+                } else {
                     LOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
                           *(int32_t *)pCmdData);
 
                     // If a device supports virtualizer and the effect has been temporarily disabled
                     // previously then re-enable it
 
-                    if(pContext->pBundledContext->bVirtualizerTempDisabled == LVM_TRUE){
+                    if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
                         LOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
                               *(int32_t *)pCmdData);
                         android::LvmEffect_enable(pContext);
-                        pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
                     }
+                    pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
                 }
             }
             LOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index 3c3af8f..1a06cc6 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -47,17 +47,22 @@
     VISUALIZER_STATE_ACTIVE,
 };
 
+// Maximum number of reads from the same capture buffer before it is reset. Reaching this
+// count means the framework has stopped playing audio and we must start returning silence.
+#define MAX_STALL_COUNT 10
+
 struct VisualizerContext {
     const struct effect_interface_s *mItfe;
     effect_config_t mConfig;
-    uint32_t mState;
     uint32_t mCaptureIdx;
     uint32_t mCaptureSize;
-    uint32_t mCurrentBuf;
+    uint8_t mState;
+    uint8_t mCurrentBuf;
+    uint8_t mLastBuf;
+    uint8_t mStallCount;
     uint8_t mCaptureBuf[2][VISUALIZER_CAPTURE_SIZE_MAX];
 };
 
-
 //
 //--- Local functions
 //
@@ -66,6 +71,8 @@
 {
     pContext->mCaptureIdx = 0;
     pContext->mCurrentBuf = 0;
+    pContext->mLastBuf = 1;
+    pContext->mStallCount = 0;
     memset(pContext->mCaptureBuf[0], 0x80, VISUALIZER_CAPTURE_SIZE_MAX);
     memset(pContext->mCaptureBuf[1], 0x80, VISUALIZER_CAPTURE_SIZE_MAX);
 }
@@ -417,9 +424,24 @@
             memcpy(pReplyData,
                    pContext->mCaptureBuf[pContext->mCurrentBuf ^ 1],
                    pContext->mCaptureSize);
+            // if the audio framework has stopped playing audio while the effect is still
+            // active, we must clear the capture buffer to return silence
+            if (pContext->mLastBuf == pContext->mCurrentBuf) {
+                if (pContext->mStallCount < MAX_STALL_COUNT) {
+                    if (++pContext->mStallCount == MAX_STALL_COUNT) {
+                        memset(pContext->mCaptureBuf[pContext->mCurrentBuf ^ 1],
+                                0x80,
+                                pContext->mCaptureSize);
+                    }
+                }
+            } else {
+                pContext->mStallCount = 0;
+            }
+            pContext->mLastBuf = pContext->mCurrentBuf;
         } else {
             memset(pReplyData, 0x80, pContext->mCaptureSize);
         }
+
         break;
 
     default:
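For reference, the stall detection added above extracted into a standalone helper; MAX_STALL_COUNT and the 0x80 silence value come from the hunk, while the StallTracker wrapper and its names are illustrative only and not part of the patch.

// Illustrative extraction of the visualizer stall detection: if the capture buffer
// index has not advanced for MAX_STALL_COUNT consecutive reads, the caller should
// reset the returned buffer to 0x80 (silence for unsigned 8-bit samples).
#include <stdint.h>

#define MAX_STALL_COUNT 10

struct StallTracker {
    uint8_t mLastBuf = 1;
    uint8_t mStallCount = 0;

    // Returns true exactly once per stall, when the silence reset should happen.
    bool onCapture(uint8_t currentBuf) {
        bool clearToSilence = false;
        if (mLastBuf == currentBuf) {
            if (mStallCount < MAX_STALL_COUNT && ++mStallCount == MAX_STALL_COUNT) {
                clearToSilence = true;
            }
        } else {
            mStallCount = 0;
        }
        mLastBuf = currentBuf;
        return clearToSilence;
    }
};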
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 50a41ca..9c1e6b7 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -35,7 +35,6 @@
     SET_DATA_SOURCE_URL,
     SET_DATA_SOURCE_FD,
     SET_DATA_SOURCE_STREAM,
-    SET_VIDEO_SURFACE,
     PREPARE_ASYNC,
     START,
     STOP,
@@ -112,16 +111,6 @@
         return reply.readInt32();
     }
 
-    // pass the buffered Surface to the media player service
-    status_t setVideoSurface(const sp<Surface>& surface)
-    {
-        Parcel data, reply;
-        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
-        Surface::writeToParcel(surface, &data);
-        remote()->transact(SET_VIDEO_SURFACE, data, &reply);
-        return reply.readInt32();
-    }
-
     // pass the buffered ISurfaceTexture to the media player service
     status_t setVideoSurfaceTexture(const sp<ISurfaceTexture>& surfaceTexture)
     {
@@ -345,12 +334,6 @@
             reply->writeInt32(setDataSource(source));
             return NO_ERROR;
         }
-        case SET_VIDEO_SURFACE: {
-            CHECK_INTERFACE(IMediaPlayer, data, reply);
-            sp<Surface> surface = Surface::readFromParcel(data);
-            reply->writeInt32(setVideoSurface(surface));
-            return NO_ERROR;
-        } break;
         case SET_VIDEO_SURFACETEXTURE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             sp<ISurfaceTexture> surfaceTexture =
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index ad55ff8..6096b72 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -404,11 +404,12 @@
     CHECK(!strcmp("maxInputFrameWidth", atts[0]) &&
           !strcmp("maxInputFrameHeight", atts[2])  &&
           !strcmp("maxOutputFrameWidth", atts[4]) &&
-          !strcmp("maxOutputFrameHeight", atts[6]));
+          !strcmp("maxOutputFrameHeight", atts[6]) &&
+          !strcmp("maxPrefetchYUVFrames", atts[8]));
 
     MediaProfiles::VideoEditorCap *pVideoEditorCap =
         new MediaProfiles::VideoEditorCap(atoi(atts[1]), atoi(atts[3]),
-                atoi(atts[5]), atoi(atts[7]));
+                atoi(atts[5]), atoi(atts[7]), atoi(atts[9]));
 
     logVideoEditorCap(*pVideoEditorCap);
     profiles->mVideoEditorCap = pVideoEditorCap;
@@ -850,7 +851,8 @@
                 VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH,
                 VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT,
                 VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH,
-                VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT);
+                VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT,
+                VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES);
 }
 /*static*/ void
 MediaProfiles::createDefaultExportVideoProfiles(MediaProfiles *profiles)
@@ -1019,6 +1021,8 @@
         return mVideoEditorCap->mMaxOutputFrameWidth;
     if (!strcmp("videoeditor.output.height.max", name))
         return mVideoEditorCap->mMaxOutputFrameHeight;
+    if (!strcmp("maxPrefetchYUVFrames", name))
+        return mVideoEditorCap->mMaxPrefetchYUVFrames;
 
     LOGE("The given video editor param name %s is not found", name);
     return -1;
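A hedged usage sketch for the new cap: reading maxPrefetchYUVFrames at runtime through the MediaProfiles query handled in the lookup above; the helper function name is made up for illustration.

// Illustrative usage of the new video editor cap (queryMaxPrefetchYUVFrames is a
// hypothetical helper; getInstance() and getVideoEditorCapParamByName() are the
// existing MediaProfiles API).
#include <media/MediaProfiles.h>

using android::MediaProfiles;

static int queryMaxPrefetchYUVFrames() {
    MediaProfiles *profiles = MediaProfiles::getInstance();
    // Returns -1 when the parameter name is unknown, as in the lookup above.
    return profiles->getVideoEditorCapParamByName("maxPrefetchYUVFrames");
}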
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index f72300b..c2e1ddf 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -219,14 +219,6 @@
     return mPlayer->getMetadata(update_only, apply_filter, metadata);
 }
 
-status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
-{
-    LOGV("setVideoSurface");
-    Mutex::Autolock _l(mLock);
-    if (mPlayer == 0) return NO_INIT;
-    return mPlayer->setVideoSurface(surface);
-}
-
 status_t MediaPlayer::setVideoSurfaceTexture(
         const sp<ISurfaceTexture>& surfaceTexture)
 {
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index ec7d8a0..a3e2517 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -32,8 +32,8 @@
 	libdl
 
 LOCAL_STATIC_LIBRARIES := \
-        libstagefright_rtsp                     \
         libstagefright_nuplayer                 \
+        libstagefright_rtsp                     \
 
 LOCAL_C_INCLUDES :=                                                 \
 	$(JNI_H_INCLUDE)                                                \
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index ba9f54f..e8d0f0c 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -589,6 +589,10 @@
         }
     }
 
+    if (!strncasecmp("rtsp://", url, 7)) {
+        return NU_PLAYER;
+    }
+
     // use MidiFile for MIDI extensions
     int lenURL = strlen(url);
     for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
@@ -784,14 +788,6 @@
     return mStatus;
 }
 
-status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
-{
-    LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
-    sp<MediaPlayerBase> p = getPlayer();
-    if (p == 0) return UNKNOWN_ERROR;
-    return p->setVideoSurface(surface);
-}
-
 void MediaPlayerService::Client::disconnectNativeWindow() {
     if (mConnectedWindow != NULL) {
         status_t err = native_window_api_disconnect(mConnectedWindow.get(),
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 62214ba..04d9e28 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -248,7 +248,6 @@
 
         // IMediaPlayer interface
         virtual void            disconnect();
-        virtual status_t        setVideoSurface(const sp<Surface>& surface);
         virtual status_t        setVideoSurfaceTexture(
                                         const sp<ISurfaceTexture>& surfaceTexture);
         virtual status_t        prepareAsync();
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index b35696f..3469389 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,6 @@
             const char* path, const KeyedVector<String8, String8> *headers);
 
     virtual status_t    setDataSource(int fd, int64_t offset, int64_t length);
-    virtual status_t    setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<ISurfaceTexture>& surfaceTexture)
                             { return UNKNOWN_ERROR; }
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index cd4b1ef..598d573 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -69,12 +69,6 @@
     return mPlayer->setDataSource(source);
 }
 
-status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
-    LOGV("setVideoSurface");
-
-    return mPlayer->setSurface(surface);
-}
-
 status_t StagefrightPlayer::setVideoSurfaceTexture(
         const sp<ISurfaceTexture> &surfaceTexture) {
     LOGV("setVideoSurfaceTexture");
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index cbc6d49..e89e18a 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -40,7 +40,6 @@
 
     virtual status_t setDataSource(const sp<IStreamSource> &source);
 
-    virtual status_t setVideoSurface(const sp<Surface> &surface);
     virtual status_t setVideoSurfaceTexture(
             const sp<ISurfaceTexture> &surfaceTexture);
     virtual status_t prepare();
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 802a11b..91ffa7d 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,9 +75,6 @@
 
 
     // All the methods below wrap the mPlayer instance.
-    virtual status_t setVideoSurface(const android::sp<android::Surface>& s)  {
-        return mPlayer->setVideoSurface(s);
-    }
     virtual status_t setVideoSurfaceTexture(
             const android::sp<android::ISurfaceTexture>& st)  {
         return mPlayer->setVideoSurfaceTexture(st);
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index e761509..33e2f93 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -8,6 +8,7 @@
         NuPlayerDriver.cpp              \
         NuPlayerRenderer.cpp            \
         NuPlayerStreamListener.cpp      \
+        RTSPSource.cpp                  \
         StreamingSource.cpp             \
 
 LOCAL_C_INCLUDES := \
@@ -15,6 +16,7 @@
 	$(TOP)/frameworks/base/media/libstagefright/include             \
         $(TOP)/frameworks/base/media/libstagefright/mpeg2ts             \
         $(TOP)/frameworks/base/media/libstagefright/httplive            \
+        $(TOP)/frameworks/base/media/libstagefright/rtsp                \
 
 LOCAL_MODULE:= libstagefright_nuplayer
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 6c54130..2a5c0a6 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -25,6 +25,7 @@
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
+#include "RTSPSource.h"
 #include "StreamingSource.h"
 
 #include "ATSParser.h"
@@ -87,13 +88,14 @@
         const char *url, const KeyedVector<String8, String8> *headers) {
     sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
 
-    msg->setObject("source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
-    msg->post();
-}
+    if (!strncasecmp(url, "rtsp://", 7)) {
+        msg->setObject(
+                "source", new RTSPSource(url, headers, mUIDValid, mUID));
+    } else {
+        msg->setObject(
+                "source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
+    }
 
-void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
-    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
-    msg->setObject("native-window", new NativeWindowWrapper(surface));
     msg->post();
 }
 
@@ -568,8 +570,15 @@
     CHECK(mAudioDecoder == NULL);
     CHECK(mVideoDecoder == NULL);
 
+    ++mScanSourcesGeneration;
+    mScanSourcesPending = false;
+
     mRenderer.clear();
-    mSource.clear();
+
+    if (mSource != NULL) {
+        mSource->stop();
+        mSource.clear();
+    }
 
     if (mDriver != NULL) {
         sp<NuPlayerDriver> driver = mDriver.promote();
@@ -781,7 +790,7 @@
         return;
     }
 
-    driver->sendEvent(msg, ext1, ext2);
+    driver->notifyListener(msg, ext1, ext2);
 }
 
 void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index a5382b4..f23deea 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -42,7 +42,6 @@
     void setDataSource(
             const char *url, const KeyedVector<String8, String8> *headers);
 
-    void setVideoSurface(const sp<Surface> &surface);
     void setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink);
     void start();
@@ -68,6 +67,7 @@
     struct Renderer;
     struct Source;
     struct StreamingSource;
+    struct RTSPSource;
 
     enum {
         kWhatSetDataSource              = '=DaS',
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index b1e917d..5aa99bf 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -35,6 +35,7 @@
       mNumFramesDropped(0),
       mLooper(new ALooper),
       mState(UNINITIALIZED),
+      mAtEOS(false),
       mStartupSeekTimeUs(-1) {
     mLooper->setName("NuPlayerDriver Looper");
 
@@ -88,12 +89,6 @@
     return OK;
 }
 
-status_t NuPlayerDriver::setVideoSurface(const sp<Surface> &surface) {
-    mPlayer->setVideoSurface(surface);
-
-    return OK;
-}
-
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<ISurfaceTexture> &surfaceTexture) {
     mPlayer->setVideoSurfaceTexture(surfaceTexture);
@@ -106,7 +101,7 @@
 }
 
 status_t NuPlayerDriver::prepareAsync() {
-    sendEvent(MEDIA_PREPARED);
+    notifyListener(MEDIA_PREPARED);
 
     return OK;
 }
@@ -117,6 +112,7 @@
             return INVALID_OPERATION;
         case STOPPED:
         {
+            mAtEOS = false;
             mPlayer->start();
 
             if (mStartupSeekTimeUs >= 0) {
@@ -173,7 +169,7 @@
 }
 
 bool NuPlayerDriver::isPlaying() {
-    return mState == PLAYING;
+    return mState == PLAYING && !mAtEOS;
 }
 
 status_t NuPlayerDriver::seekTo(int msec) {
@@ -190,6 +186,7 @@
         case PLAYING:
         case PAUSED:
         {
+            mAtEOS = false;
             mPlayer->seekToAsync(seekTimeUs);
             break;
         }
@@ -291,7 +288,7 @@
 }
 
 void NuPlayerDriver::notifySeekComplete() {
-    sendEvent(MEDIA_SEEK_COMPLETE);
+    notifyListener(MEDIA_SEEK_COMPLETE);
 }
 
 void NuPlayerDriver::notifyFrameStats(
@@ -320,4 +317,12 @@
     return OK;
 }
 
+void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) {
+    if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) {
+        mAtEOS = true;
+    }
+
+    sendEvent(msg, ext1, ext2);
+}
+
 }  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 181c37d..4a0026c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -37,7 +37,6 @@
 
     virtual status_t setDataSource(const sp<IStreamSource> &source);
 
-    virtual status_t setVideoSurface(const sp<Surface> &surface);
     virtual status_t setVideoSurfaceTexture(
             const sp<ISurfaceTexture> &surfaceTexture);
     virtual status_t prepare();
@@ -67,6 +66,7 @@
     void notifyPosition(int64_t positionUs);
     void notifySeekComplete();
     void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
+    void notifyListener(int msg, int ext1 = 0, int ext2 = 0);
 
 protected:
     virtual ~NuPlayerDriver();
@@ -95,6 +95,7 @@
     };
 
     State mState;
+    bool mAtEOS;
 
     int64_t mStartupSeekTimeUs;
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 61a7ba4..640e9fa 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -219,7 +219,9 @@
 
 bool NuPlayer::Renderer::onDrainAudioQueue() {
     uint32_t numFramesPlayed;
-    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
+    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
+        return false;
+    }
 
     ssize_t numFramesAvailableToWrite =
         mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 8a7eece..531b29f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -28,6 +28,7 @@
     Source() {}
 
     virtual void start() = 0;
+    virtual void stop() {}
 
     // Returns OK iff more data was available,
     // an error or ERROR_END_OF_STREAM if not.
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
new file mode 100644
index 0000000..e72adc4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTSPSource"
+#include <utils/Log.h>
+
+#include "RTSPSource.h"
+
+#include "AnotherPacketSource.h"
+#include "MyHandler.h"
+
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+NuPlayer::RTSPSource::RTSPSource(
+        const char *url,
+        const KeyedVector<String8, String8> *headers,
+        bool uidValid,
+        uid_t uid)
+    : mURL(url),
+      mUIDValid(uidValid),
+      mUID(uid),
+      mFlags(0),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mDisconnectReplyID(0) {
+    if (headers) {
+        mExtraHeaders = *headers;
+
+        ssize_t index =
+            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+
+        if (index >= 0) {
+            mFlags |= kFlagIncognito;
+
+            mExtraHeaders.removeItemsAt(index);
+        }
+    }
+}
+
+NuPlayer::RTSPSource::~RTSPSource() {
+    if (mLooper != NULL) {
+        mLooper->stop();
+    }
+}
+
+void NuPlayer::RTSPSource::start() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtsp");
+        mLooper->start();
+
+        mReflector = new AHandlerReflector<RTSPSource>(this);
+        mLooper->registerHandler(mReflector);
+    }
+
+    CHECK(mHandler == NULL);
+
+    sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+
+    mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
+    mLooper->registerHandler(mHandler);
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    mHandler->connect();
+}
+
+void NuPlayer::RTSPSource::stop() {
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id());
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTSPSource::feedMoreTSData() {
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTSPSource::getFormat(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+status_t NuPlayer::RTSPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        return finalResult == OK ? -EWOULDBLOCK : finalResult;
+    }
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+sp<AnotherPacketSource> NuPlayer::RTSPSource::getSource(bool audio) {
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
+    *durationUs = 0ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
+    if (mState != CONNECTED) {
+        return UNKNOWN_ERROR;
+    }
+
+    mState = SEEKING;
+    mHandler->seek(seekTimeUs);
+
+    return OK;
+}
+
+bool NuPlayer::RTSPSource::isSeekable() {
+    return true;
+}
+
+void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
+    if (msg->what() == kWhatDisconnect) {
+        uint32_t replyID;
+        CHECK(msg->senderAwaitsResponse(&replyID));
+
+        mDisconnectReplyID = replyID;
+        finishDisconnectIfPossible();
+        return;
+    }
+
+    CHECK_EQ(msg->what(), (int)kWhatNotify);
+
+    int32_t what;
+    CHECK(msg->findInt32("what", &what));
+
+    switch (what) {
+        case MyHandler::kWhatConnected:
+            onConnected();
+            break;
+
+        case MyHandler::kWhatDisconnected:
+            onDisconnected(msg);
+            break;
+
+        case MyHandler::kWhatSeekDone:
+        {
+            mState = CONNECTED;
+            break;
+        }
+
+        case MyHandler::kWhatAccessUnit:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            sp<RefBase> obj;
+            CHECK(msg->findObject("accessUnit", &obj));
+
+            sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                LOGI("dropping damaged access unit.");
+                break;
+            }
+
+            const TrackInfo &info = mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info.mSource;
+            if (source != NULL) {
+#if 1
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info.mRTPTime)
+                        / info.mTimeScale
+                        * 1000000ll
+                        + info.mNormalPlaytimeUs;
+
+                accessUnit->meta()->setInt64("timeUs", nptUs);
+#endif
+
+                source->queueAccessUnit(accessUnit);
+            }
+            break;
+        }
+
+        case MyHandler::kWhatEOS:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            int32_t finalResult;
+            CHECK(msg->findInt32("finalResult", &finalResult));
+            CHECK_NE(finalResult, (status_t)OK);
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->signalEOS(finalResult);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatSeekDiscontinuity:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                source->queueDiscontinuity(ATSParser::DISCONTINUITY_SEEK, NULL);
+            }
+
+            break;
+        }
+
+        case MyHandler::kWhatNormalPlayTimeMapping:
+        {
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+            CHECK_LT(trackIndex, mTracks.size());
+
+            uint32_t rtpTime;
+            CHECK(msg->findInt32("rtpTime", (int32_t *)&rtpTime));
+
+            int64_t nptUs;
+            CHECK(msg->findInt64("nptUs", &nptUs));
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+            info->mRTPTime = rtpTime;
+            info->mNormalPlaytimeUs = nptUs;
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer::RTSPSource::onConnected() {
+    CHECK(mAudioTrack == NULL);
+    CHECK(mVideoTrack == NULL);
+
+    size_t numTracks = mHandler->countTracks();
+    for (size_t i = 0; i < numTracks; ++i) {
+        int32_t timeScale;
+        sp<MetaData> format = mHandler->getTrackFormat(i, &timeScale);
+
+        const char *mime;
+        CHECK(format->findCString(kKeyMIMEType, &mime));
+
+        bool isAudio = !strncasecmp(mime, "audio/", 6);
+        bool isVideo = !strncasecmp(mime, "video/", 6);
+
+        TrackInfo info;
+        info.mTimeScale = timeScale;
+        info.mRTPTime = 0;
+        info.mNormalPlaytimeUs = 0ll;
+
+        if ((isAudio && mAudioTrack == NULL)
+                || (isVideo && mVideoTrack == NULL)) {
+            sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+            if (isAudio) {
+                mAudioTrack = source;
+            } else {
+                mVideoTrack = source;
+            }
+
+            info.mSource = source;
+        }
+
+        mTracks.push(info);
+    }
+
+    mState = CONNECTED;
+}
+
+void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+    mLooper->unregisterHandler(mHandler->id());
+    mHandler.clear();
+
+    mState = DISCONNECTED;
+    mFinalResult = err;
+
+    if (mDisconnectReplyID != 0) {
+        finishDisconnectIfPossible();
+    }
+}
+
+void NuPlayer::RTSPSource::finishDisconnectIfPossible() {
+    if (mState != DISCONNECTED) {
+        mHandler->disconnect();
+        return;
+    }
+
+    (new AMessage)->postReply(mDisconnectReplyID);
+    mDisconnectReplyID = 0;
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
new file mode 100644
index 0000000..66eab72
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTSP_SOURCE_H_
+
+#define RTSP_SOURCE_H_
+
+#include "NuPlayerSource.h"
+
+#include <media/stagefright/foundation/AHandlerReflector.h>
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+struct MyHandler;
+
+struct NuPlayer::RTSPSource : public NuPlayer::Source {
+    RTSPSource(
+            const char *url,
+            const KeyedVector<String8, String8> *headers,
+            bool uidValid = false,
+            uid_t uid = 0);
+
+    virtual void start();
+    virtual void stop();
+
+    virtual status_t feedMoreTSData();
+
+    virtual sp<MetaData> getFormat(bool audio);
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(int64_t seekTimeUs);
+    virtual bool isSeekable();
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~RTSPSource();
+
+private:
+    enum {
+        kWhatNotify          = 'noti',
+        kWhatDisconnect      = 'disc',
+    };
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        SEEKING,
+    };
+
+    enum Flags {
+        // Don't log any URLs.
+        kFlagIncognito = 1,
+    };
+
+    struct TrackInfo {
+        sp<AnotherPacketSource> mSource;
+
+        int32_t mTimeScale;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+    };
+
+    AString mURL;
+    KeyedVector<String8, String8> mExtraHeaders;
+    bool mUIDValid;
+    uid_t mUID;
+    uint32_t mFlags;
+    State mState;
+    status_t mFinalResult;
+    uint32_t mDisconnectReplyID;
+
+    sp<ALooper> mLooper;
+    sp<AHandlerReflector<RTSPSource> > mReflector;
+    sp<MyHandler> mHandler;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    void onConnected();
+    void onDisconnected(const sp<AMessage> &msg);
+    void finishDisconnectIfPossible();
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
+};
+
+}  // namespace android
+
+#endif  // RTSP_SOURCE_H_
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 9cb18de..d947760 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -681,6 +681,10 @@
     static const MimeToRole kMimeToRole[] = {
         { MEDIA_MIMETYPE_AUDIO_MPEG,
             "audio_decoder.mp3", "audio_encoder.mp3" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+            "audio_decoder.mp1", "audio_encoder.mp1" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+            "audio_decoder.mp2", "audio_encoder.mp2" },
         { MEDIA_MIMETYPE_AUDIO_AMR_NB,
             "audio_decoder.amrnb", "audio_encoder.amrnb" },
         { MEDIA_MIMETYPE_AUDIO_AMR_WB,
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 0b1a2af..0aeb515 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -58,7 +58,6 @@
         $(TOP)/frameworks/base/include/media/stagefright/openmax \
         $(TOP)/external/flac/include \
         $(TOP)/external/tremolo \
-        $(TOP)/frameworks/base/media/libstagefright/rtsp \
         $(TOP)/external/openssl/include \
 
 LOCAL_SHARED_LIBRARIES := \
@@ -88,7 +87,6 @@
         libvpx \
         libstagefright_mpeg2ts \
         libstagefright_httplive \
-        libstagefright_rtsp \
         libstagefright_id3 \
         libFLAC \
 
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 1165af5..f6d054d 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -22,7 +22,6 @@
 
 #include <dlfcn.h>
 
-#include "include/ARTSPController.h"
 #include "include/AwesomePlayer.h"
 #include "include/DRMExtractor.h"
 #include "include/SoftwareRenderer.h"
@@ -53,7 +52,6 @@
 #include <gui/SurfaceTextureClient.h>
 #include <surfaceflinger/ISurfaceComposer.h>
 
-#include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
 
 #include <cutils/properties.h>
@@ -65,7 +63,6 @@
 
 static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
 static int64_t kHighWaterMarkUs = 5000000ll;  // 5secs
-static int64_t kHighWaterMarkRTSPUs = 4000000ll;  // 4secs
 static const size_t kLowWaterMarkBytes = 40000;
 static const size_t kHighWaterMarkBytes = 200000;
 
@@ -227,17 +224,18 @@
     mClient.disconnect();
 }
 
-void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) {
+void AwesomePlayer::cancelPlayerEvents(bool keepNotifications) {
     mQueue.cancelEvent(mVideoEvent->eventID());
     mVideoEventPending = false;
-    mQueue.cancelEvent(mStreamDoneEvent->eventID());
-    mStreamDoneEventPending = false;
-    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
-    mAudioStatusEventPending = false;
     mQueue.cancelEvent(mVideoLagEvent->eventID());
     mVideoLagEventPending = false;
 
-    if (!keepBufferingGoing) {
+    if (!keepNotifications) {
+        mQueue.cancelEvent(mStreamDoneEvent->eventID());
+        mStreamDoneEventPending = false;
+        mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
+        mAudioStatusEventPending = false;
+
         mQueue.cancelEvent(mBufferingEvent->eventID());
         mBufferingEventPending = false;
     }
@@ -388,10 +386,12 @@
     for (size_t i = 0; i < extractor->countTracks(); ++i) {
         sp<MetaData> meta = extractor->getTrackMetaData(i);
 
-        const char *mime;
-        CHECK(meta->findCString(kKeyMIMEType, &mime));
+        const char *_mime;
+        CHECK(meta->findCString(kKeyMIMEType, &_mime));
 
-        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
+        String8 mime = String8(_mime);
+
+        if (!haveVideo && !strncasecmp(mime.string(), "video/", 6)) {
             setVideoSource(extractor->getTrack(i));
             haveVideo = true;
 
@@ -412,9 +412,9 @@
                 mStats.mTracks.push();
                 TrackStat *stat =
                     &mStats.mTracks.editItemAt(mStats.mVideoTrackIndex);
-                stat->mMIME = mime;
+                stat->mMIME = mime.string();
             }
-        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+        } else if (!haveAudio && !strncasecmp(mime.string(), "audio/", 6)) {
             setAudioSource(extractor->getTrack(i));
             haveAudio = true;
 
@@ -424,10 +424,10 @@
                 mStats.mTracks.push();
                 TrackStat *stat =
                     &mStats.mTracks.editItemAt(mStats.mAudioTrackIndex);
-                stat->mMIME = mime;
+                stat->mMIME = mime.string();
             }
 
-            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+            if (!strcasecmp(mime.string(), MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                 // Only do this for vorbis audio, none of the other audio
                 // formats even support this ringtone specific hack and
                 // retrieving the metadata on some extractors may turn out
@@ -439,7 +439,7 @@
                     modifyFlags(AUTO_LOOPING, SET);
                 }
             }
-        } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+        } else if (!strcasecmp(mime.string(), MEDIA_MIMETYPE_TEXT_3GPP)) {
             addTextSource(extractor->getTrack(i));
         }
     }
@@ -485,9 +485,6 @@
         if (mConnectingDataSource != NULL) {
             LOGI("interrupting the connection process");
             mConnectingDataSource->disconnect();
-        } else if (mConnectingRTSPController != NULL) {
-            LOGI("interrupting the connection process");
-            mConnectingRTSPController->disconnect();
         }
 
         if (mFlags & PREPARING_CONNECTED) {
@@ -534,11 +531,6 @@
 
     mVideoRenderer.clear();
 
-    if (mRTSPController != NULL) {
-        mRTSPController->disconnect();
-        mRTSPController.clear();
-    }
-
     if (mVideoSource != NULL) {
         shutdownVideoDecoder_l();
     }
@@ -612,10 +604,7 @@
 bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
     int64_t bitrate;
 
-    if (mRTSPController != NULL) {
-        *durationUs = mRTSPController->getQueueDurationUs(eos);
-        return true;
-    } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
+    if (mCachedSource != NULL && getBitrate(&bitrate)) {
         status_t finalStatus;
         size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
         *durationUs = cachedDataRemaining * 8000000ll / bitrate;
@@ -751,9 +740,6 @@
         LOGV("cachedDurationUs = %.2f secs, eos=%d",
              cachedDurationUs / 1E6, eos);
 
-        int64_t highWaterMarkUs =
-            (mRTSPController != NULL) ? kHighWaterMarkRTSPUs : kHighWaterMarkUs;
-
         if ((mFlags & PLAYING) && !eos
                 && (cachedDurationUs < kLowWaterMarkUs)) {
             LOGI("cache is running low (%.2f secs) , pausing.",
@@ -763,7 +749,7 @@
             ensureCacheIsFetching_l();
             sendCacheStats();
             notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
-        } else if (eos || cachedDurationUs > highWaterMarkUs) {
+        } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
             if (mFlags & CACHE_UNDERRUN) {
                 LOGI("cache has filled up (%.2f secs), resuming.",
                      cachedDurationUs / 1E6);
@@ -1081,7 +1067,8 @@
 
     if (USE_SURFACE_ALLOC
             && !strncmp(component, "OMX.", 4)
-            && strncmp(component, "OMX.google.", 11)) {
+            && strncmp(component, "OMX.google.", 11)
+            && strcmp(component, "OMX.Nvidia.mpeg2v.decode")) {
         // Hardware decoders avoid the CPU color conversion by decoding
         // directly to ANativeBuffers, so we must use a renderer that
         // just pushes those buffers to the ANativeWindow.
@@ -1109,7 +1096,7 @@
         return OK;
     }
 
-    cancelPlayerEvents(true /* keepBufferingGoing */);
+    cancelPlayerEvents(true /* keepNotifications */);
 
     if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
         if (at_eos) {
@@ -1153,18 +1140,9 @@
     return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
 }
 
-status_t AwesomePlayer::setSurface(const sp<Surface> &surface) {
-    Mutex::Autolock autoLock(mLock);
-
-    mSurface = surface;
-    return setNativeWindow_l(surface);
-}
-
 status_t AwesomePlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
     Mutex::Autolock autoLock(mLock);
 
-    mSurface.clear();
-
     status_t err;
     if (surfaceTexture != NULL) {
         err = setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
@@ -1263,10 +1241,7 @@
 }
 
 status_t AwesomePlayer::getPosition(int64_t *positionUs) {
-    if (mRTSPController != NULL) {
-        *positionUs = mRTSPController->getNormalPlayTimeUs();
-    }
-    else if (mSeeking != NO_SEEK) {
+    if (mSeeking != NO_SEEK) {
         *positionUs = mSeekTimeUs;
     } else if (mVideoSource != NULL
             && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
@@ -1316,25 +1291,7 @@
     }
 }
 
-// static
-void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
-    static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
-}
-
-void AwesomePlayer::onRTSPSeekDone() {
-    if (!mSeekNotificationSent) {
-        notifyListener_l(MEDIA_SEEK_COMPLETE);
-        mSeekNotificationSent = true;
-    }
-}
-
 status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
-    if (mRTSPController != NULL) {
-        mSeekNotificationSent = false;
-        mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
-        return OK;
-    }
-
     if (mFlags & CACHE_UNDERRUN) {
         modifyFlags(CACHE_UNDERRUN, CLEAR);
         play_l();
@@ -1770,7 +1727,6 @@
         int64_t latenessUs = nowUs - timeUs;
 
         if (latenessUs > 500000ll
-                && mRTSPController == NULL
                 && mAudioPlayer != NULL
                 && mAudioPlayer->getMediaTimeMapping(
                     &realTimeUs, &mediaTimeUs)) {
@@ -2085,34 +2041,6 @@
                 return UNKNOWN_ERROR;
             }
         }
-    } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
-        if (mLooper == NULL) {
-            mLooper = new ALooper;
-            mLooper->setName("rtsp");
-            mLooper->start();
-        }
-        mRTSPController = new ARTSPController(mLooper);
-        mConnectingRTSPController = mRTSPController;
-
-        if (mUIDValid) {
-            mConnectingRTSPController->setUID(mUID);
-        }
-
-        mLock.unlock();
-        status_t err = mRTSPController->connect(mUri.string());
-        mLock.lock();
-
-        mConnectingRTSPController.clear();
-
-        LOGI("ARTSPController::connect returned %d", err);
-
-        if (err != OK) {
-            mRTSPController.clear();
-            return err;
-        }
-
-        sp<MediaExtractor> extractor = mRTSPController.get();
-        return setDataSource_l(extractor);
     } else {
         dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
     }
@@ -2224,7 +2152,7 @@
 
     modifyFlags(PREPARING_CONNECTED, SET);
 
-    if (isStreamingHTTP() || mRTSPController != NULL) {
+    if (isStreamingHTTP()) {
         postBufferingEvent_l();
     } else {
         finishAsyncPrepare_l();
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 1ba79e5..e4de20a 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -257,6 +257,12 @@
             mForceRead = false;
             *timestampUs =
                 mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+
+            // Really make sure that this video recording frame will not be dropped.
+            if (*timestampUs < mStartTimeUs) {
+                LOGI("set timestampUs to start time stamp %lld us", mStartTimeUs);
+                *timestampUs = mStartTimeUs;
+            }
             return false;
         }
     }
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index c16b3b5..70523c1 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -20,6 +20,7 @@
 #include "include/MPEG4Extractor.h"
 #include "include/WAVExtractor.h"
 #include "include/OggExtractor.h"
+#include "include/MPEG2PSExtractor.h"
 #include "include/MPEG2TSExtractor.h"
 #include "include/NuCachedSource2.h"
 #include "include/HTTPBase.h"
@@ -113,6 +114,7 @@
     RegisterSniffer(SniffMP3);
     RegisterSniffer(SniffAAC);
     RegisterSniffer(SniffAVI);
+    RegisterSniffer(SniffMPEG2PS);
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("drm.service.enabled", value, NULL)
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 92e84c2..34e9cd7 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -25,11 +25,11 @@
 #include "include/VBRISeeker.h"
 #include "include/XINGSeeker.h"
 
+#include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDebug.h>
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MediaSource.h>
@@ -289,9 +289,24 @@
     GetMPEGAudioFrameSize(
             header, &frame_size, &sample_rate, &num_channels, &bitrate);
 
+    unsigned layer = 4 - ((header >> 17) & 3);
+
     mMeta = new MetaData;
 
-    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+    switch (layer) {
+        case 1:
+            mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I);
+            break;
+        case 2:
+            mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II);
+            break;
+        case 3:
+            mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+            break;
+        default:
+            TRESPASS();
+    }
+
     mMeta->setInt32(kKeySampleRate, sample_rate);
     mMeta->setInt32(kKeyBitRate, bitrate * 1000);
     mMeta->setInt32(kKeyChannelCount, num_channels);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 1e24599..1ebf0a8 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1136,6 +1136,41 @@
             break;
         }
 
+        // @xyz
+        case FOURCC('\xA9', 'x', 'y', 'z'):
+        {
+            // Best case the total data length inside "@xyz" box
+            // would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/",
+            // where "\x00\x04" is the text string length with value = 4,
+            // "\0x15\xc7" is the language code = en, and "0+0" is a
+            // location (string) value with longitude = 0 and latitude = 0.
+            if (chunk_data_size < 8) {
+                return ERROR_MALFORMED;
+            }
+
+            // Worst case the location string length would be 18,
+            // for instance +90.0000-180.0000, without the trailing "/" and
+            // the string length + language code.
+            char buffer[18];
+
+            // Subtracting 5 from the data size is because the text string length +
+            // language code takes 4 bytes, and the trailing slash "/" takes 1 byte.
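+            // For the minimal box above, chunk_data_size = 8 and
+            // location_length = 8 - 5 = 3, so buffer ends up holding "0+0".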
+            off64_t location_length = chunk_data_size - 5;
+            if (location_length >= (off64_t) sizeof(buffer)) {
+                return ERROR_MALFORMED;
+            }
+
+            if (mDataSource->readAt(
+                        data_offset + 4, buffer, location_length) < location_length) {
+                return ERROR_IO;
+            }
+
+            buffer[location_length] = '\0';
+            mFileMetaData->setCString(kKeyLocation, buffer);
+            *offset += chunk_size;
+            break;
+        }
+
         case FOURCC('e', 's', 'd', 's'):
         {
             if (chunk_data_size < 4) {
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 01f1fba..444e823 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -30,6 +30,8 @@
 const char *MEDIA_MIMETYPE_AUDIO_AMR_NB = "audio/3gpp";
 const char *MEDIA_MIMETYPE_AUDIO_AMR_WB = "audio/amr-wb";
 const char *MEDIA_MIMETYPE_AUDIO_MPEG = "audio/mpeg";
+const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I = "audio/mpeg-L1";
+const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2";
 const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
 const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp";
 const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
@@ -45,6 +47,7 @@
 const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
 const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
 const char *MEDIA_MIMETYPE_CONTAINER_AVI = "video/avi";
+const char *MEDIA_MIMETYPE_CONTAINER_MPEG2PS = "video/mp2p";
 
 const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
 
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index a8023df..2221268 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -24,6 +24,7 @@
 #include "include/MPEG4Extractor.h"
 #include "include/WAVExtractor.h"
 #include "include/OggExtractor.h"
+#include "include/MPEG2PSExtractor.h"
 #include "include/MPEG2TSExtractor.h"
 #include "include/DRMExtractor.h"
 #include "include/WVMExtractor.h"
@@ -115,6 +116,8 @@
         ret = new WVMExtractor(source);
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC_ADTS)) {
         ret = new AACExtractor(source);
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
+        ret = new MPEG2PSExtractor(source);
     }
 
     if (ret != NULL) {
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index b20bfcb..dfd3f4a 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -109,6 +109,7 @@
     { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
 //    { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
     { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.google.mp3.decoder" },
+    { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, "OMX.Nvidia.mp2.decoder" },
 //    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
 //    { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
     { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.google.amrnb.decoder" },
@@ -1471,7 +1472,9 @@
       mOutputPortSettingsChangedPending(false),
       mLeftOverBuffer(NULL),
       mPaused(false),
-      mNativeWindow(!strncmp(componentName, "OMX.google.", 11)
+      mNativeWindow(
+              (!strncmp(componentName, "OMX.google.", 11)
+              || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode"))
                         ? NULL : nativeWindow) {
     mPortStatus[kPortIndexInput] = ENABLED;
     mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1492,6 +1495,12 @@
     static const MimeToRole kMimeToRole[] = {
         { MEDIA_MIMETYPE_AUDIO_MPEG,
             "audio_decoder.mp3", "audio_encoder.mp3" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+            "audio_decoder.mp1", "audio_encoder.mp1" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+            "audio_decoder.mp2", "audio_encoder.mp2" },
+        { MEDIA_MIMETYPE_AUDIO_MPEG,
+            "audio_decoder.mp3", "audio_encoder.mp3" },
         { MEDIA_MIMETYPE_AUDIO_AMR_NB,
             "audio_decoder.amrnb", "audio_encoder.amrnb" },
         { MEDIA_MIMETYPE_AUDIO_AMR_WB,
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index f693c72..2505096 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -38,7 +38,7 @@
         ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
         ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
         ".mkv", ".mka", ".webm", ".ts", ".fl", ".flac", ".mxmf",
-        ".avi",
+        ".avi", ".mpeg", ".mpg"
     };
     static const size_t kNumValidExtensions =
         sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index c74cb5a..4491c97 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -418,6 +418,7 @@
         { kKeyYear, METADATA_KEY_YEAR },
         { kKeyWriter, METADATA_KEY_WRITER },
         { kKeyCompilation, METADATA_KEY_COMPILATION },
+        { kKeyLocation, METADATA_KEY_LOCATION },
     };
     static const size_t kNumMapEntries = sizeof(kMap) / sizeof(kMap[0]);
 
diff --git a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
index 94a79ab..d361ef4 100644
--- a/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/AMRNBEncoder.cpp
@@ -82,7 +82,11 @@
                 &mEncState, &mSidState, false /* dtx_enable */),
              0);
 
-    mSource->start(params);
+    status_t err = mSource->start(params);
+    if (err != OK) {
+        LOGE("AudioSource is not available");
+        return err;
+    }
 
     mAnchorTimeUs = 0;
     mNumFramesOutput = 0;
diff --git a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
index 002f055..5eacc16 100644
--- a/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
+++ b/media/libstagefright/codecs/amrwbenc/AMRWBEncoder.cpp
@@ -137,8 +137,12 @@
     CHECK_EQ(OK, initCheck());
 
     mNumFramesOutput = 0;
-    mSource->start(params);
 
+    status_t err = mSource->start(params);
+    if (err != OK) {
+        LOGE("AudioSource is not available");
+        return err;
+    }
     mStarted = true;
 
     return OK;
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index ddced5f..aa07e57 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -421,8 +421,13 @@
 
         int32_t bufferSize = inHeader->nFilledLen;
 
+        // The PV decoder is lying to us, sometimes it'll claim to only have
+        // consumed a subset of the buffer when it clearly consumed all of it.
+        // Ignore whatever it says...
+        int32_t tmp = bufferSize;
+
         if (PVDecodeVideoFrame(
-                    mHandle, &bitstream, &timestamp, &bufferSize,
+                    mHandle, &bitstream, &timestamp, &tmp,
                     &useExtTimestamp,
                     outHeader->pBuffer) != PV_TRUE) {
             LOGE("failed to decode video frame.");
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 740c957..dede3ac 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -76,7 +76,8 @@
       mPicId(0),
       mHeadersDecoded(false),
       mEOSStatus(INPUT_DATA_AVAILABLE),
-      mOutputPortSettingsChange(NONE) {
+      mOutputPortSettingsChange(NONE),
+      mSignalledError(false) {
     initPorts();
     CHECK_EQ(initDecoder(), (status_t)OK);
 }
@@ -287,7 +288,7 @@
 }
 
 void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
-    if (mOutputPortSettingsChange != NONE) {
+    if (mSignalledError || mOutputPortSettingsChange != NONE) {
         return;
     }
 
@@ -298,7 +299,6 @@
     List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
     List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
     H264SwDecRet ret = H264SWDEC_PIC_RDY;
-    status_t err = OK;
     bool portSettingsChanged = false;
     while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
             && outQueue.size() == kNumOutputBuffers) {
@@ -372,7 +372,12 @@
                 inPicture.dataLen = 0;
                 if (ret < 0) {
                     LOGE("Decoder failed: %d", ret);
-                    err = ERROR_MALFORMED;
+
+                    notify(OMX_EventError, OMX_ErrorUndefined,
+                           ERROR_MALFORMED, NULL);
+
+                    mSignalledError = true;
+                    return;
                 }
             }
         }
@@ -400,10 +405,6 @@
             uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
             drainOneOutputBuffer(picId, data);
         }
-
-        if (err != OK) {
-            notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
-        }
     }
 }
 
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index 1cc85e8..879b014 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -88,6 +88,8 @@
     };
     OutputPortSettingChange mOutputPortSettingsChange;
 
+    bool mSignalledError;
+
     void initPorts();
     status_t initDecoder();
     void updatePortDefinitions();
diff --git a/media/libstagefright/include/ARTSPController.h b/media/libstagefright/include/ARTSPController.h
deleted file mode 100644
index 2bd5be6..0000000
--- a/media/libstagefright/include/ARTSPController.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef A_RTSP_CONTROLLER_H_
-
-#define A_RTSP_CONTROLLER_H_
-
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/foundation/AHandlerReflector.h>
-#include <media/stagefright/MediaExtractor.h>
-
-namespace android {
-
-struct ALooper;
-struct MyHandler;
-
-struct ARTSPController : public MediaExtractor {
-    ARTSPController(const sp<ALooper> &looper);
-
-    void setUID(uid_t uid);
-
-    status_t connect(const char *url);
-    void disconnect();
-
-    void seekAsync(int64_t timeUs, void (*seekDoneCb)(void *), void *cookie);
-
-    virtual size_t countTracks();
-    virtual sp<MediaSource> getTrack(size_t index);
-
-    virtual sp<MetaData> getTrackMetaData(
-            size_t index, uint32_t flags);
-
-    int64_t getNormalPlayTimeUs();
-    int64_t getQueueDurationUs(bool *eos);
-
-    void onMessageReceived(const sp<AMessage> &msg);
-
-    virtual uint32_t flags() const {
-        // Seeking 10secs forward or backward is a very expensive operation
-        // for rtsp, so let's not enable that.
-        // The user can always use the seek bar.
-
-        return CAN_PAUSE | CAN_SEEK;
-    }
-
-protected:
-    virtual ~ARTSPController();
-
-private:
-    enum {
-        kWhatConnectDone    = 'cdon',
-        kWhatDisconnectDone = 'ddon',
-        kWhatSeekDone       = 'sdon',
-    };
-
-    enum State {
-        DISCONNECTED,
-        CONNECTED,
-        CONNECTING,
-    };
-
-    Mutex mLock;
-    Condition mCondition;
-
-    State mState;
-    status_t mConnectionResult;
-
-    sp<ALooper> mLooper;
-    sp<MyHandler> mHandler;
-    sp<AHandlerReflector<ARTSPController> > mReflector;
-
-    bool mUIDValid;
-    uid_t mUID;
-
-    void (*mSeekDoneCb)(void *);
-    void *mSeekDoneCookie;
-    int64_t mLastSeekCompletedTimeUs;
-
-    DISALLOW_EVIL_CONSTRUCTORS(ARTSPController);
-};
-
-}  // namespace android
-
-#endif  // A_RTSP_CONTROLLER_H_
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 8e73121..0985f47 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -38,9 +38,6 @@
 struct NuCachedSource2;
 struct ISurfaceTexture;
 
-struct ALooper;
-struct ARTSPController;
-
 class DrmManagerClinet;
 class DecryptHandle;
 
@@ -84,7 +81,6 @@
 
     bool isPlaying() const;
 
-    status_t setSurface(const sp<Surface> &surface);
     status_t setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
     void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
     status_t setLooping(bool shouldLoop);
@@ -157,7 +153,6 @@
     bool mUIDValid;
     uid_t mUID;
 
-    sp<Surface> mSurface;
     sp<ANativeWindow> mNativeWindow;
     sp<MediaPlayerBase::AudioSink> mAudioSink;
 
@@ -233,10 +228,6 @@
     sp<HTTPBase> mConnectingDataSource;
     sp<NuCachedSource2> mCachedSource;
 
-    sp<ALooper> mLooper;
-    sp<ARTSPController> mRTSPController;
-    sp<ARTSPController> mConnectingRTSPController;
-
     DrmManagerClient *mDrmManagerClient;
     sp<DecryptHandle> mDecryptHandle;
 
@@ -259,7 +250,7 @@
     void notifyVideoSize_l();
     void seekAudioIfNecessary_l();
 
-    void cancelPlayerEvents(bool keepBufferingGoing = false);
+    void cancelPlayerEvents(bool keepNotifications = false);
 
     void setAudioSource(sp<MediaSource> source);
     status_t initAudioDecoder();
@@ -287,9 +278,6 @@
 
     static bool ContinuePreparation(void *cookie);
 
-    static void OnRTSPSeekDoneWrapper(void *cookie);
-    void onRTSPSeekDone();
-
     bool getBitrate(int64_t *bitrate);
 
     void finishSeekIfNecessary(int64_t videoTimeUs);
diff --git a/media/libstagefright/include/MPEG2PSExtractor.h b/media/libstagefright/include/MPEG2PSExtractor.h
new file mode 100644
index 0000000..fb76564
--- /dev/null
+++ b/media/libstagefright/include/MPEG2PSExtractor.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MPEG2_PS_EXTRACTOR_H_
+
+#define MPEG2_PS_EXTRACTOR_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <utils/threads.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+struct Track;
+struct String8;
+
+struct MPEG2PSExtractor : public MediaExtractor {
+    MPEG2PSExtractor(const sp<DataSource> &source);
+
+    virtual size_t countTracks();
+    virtual sp<MediaSource> getTrack(size_t index);
+    virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+    virtual sp<MetaData> getMetaData();
+
+    virtual uint32_t flags() const;
+
+protected:
+    virtual ~MPEG2PSExtractor();
+
+private:
+    struct Track;
+    struct WrappedTrack;
+
+    mutable Mutex mLock;
+    sp<DataSource> mDataSource;
+
+    off64_t mOffset;
+    status_t mFinalResult;
+    sp<ABuffer> mBuffer;
+    KeyedVector<unsigned, sp<Track> > mTracks;
+    bool mScanning;
+
+    bool mProgramStreamMapValid;
+    KeyedVector<unsigned, unsigned> mStreamTypeByESID;
+
+    status_t feedMore();
+
+    status_t dequeueChunk();
+    ssize_t dequeuePack();
+    ssize_t dequeueSystemHeader();
+    ssize_t dequeuePES();
+
+    DISALLOW_EVIL_CONSTRUCTORS(MPEG2PSExtractor);
+};
+
+bool SniffMPEG2PS(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+        sp<AMessage> *);
+
+}  // namespace android
+
+#endif  // MPEG2_PS_EXTRACTOR_H_
+
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 388cb54..878e534 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -64,12 +64,9 @@
 
     bool PTSTimeDeltaEstablished();
 
-protected:
-    virtual ~ATSParser();
-
-private:
     enum {
         // From ISO/IEC 13818-1: 2000 (E), Table 2-29
+        STREAMTYPE_RESERVED             = 0x00,
         STREAMTYPE_MPEG1_VIDEO          = 0x01,
         STREAMTYPE_MPEG2_VIDEO          = 0x02,
         STREAMTYPE_MPEG1_AUDIO          = 0x03,
@@ -79,6 +76,10 @@
         STREAMTYPE_H264                 = 0x1b,
     };
 
+protected:
+    virtual ~ATSParser();
+
+private:
     struct Program;
     struct Stream;
 
diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk
index 4a30416..578c669 100644
--- a/media/libstagefright/mpeg2ts/Android.mk
+++ b/media/libstagefright/mpeg2ts/Android.mk
@@ -6,6 +6,7 @@
         AnotherPacketSource.cpp   \
         ATSParser.cpp             \
         ESQueue.cpp               \
+        MPEG2PSExtractor.cpp      \
         MPEG2TSExtractor.cpp      \
 
 LOCAL_C_INCLUDES:= \
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index a56da36..b9a4826 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -585,6 +585,8 @@
         return NULL;
     }
 
+    unsigned layer = 4 - ((header >> 17) & 3);
+
     sp<ABuffer> accessUnit = new ABuffer(frameSize);
     memcpy(accessUnit->data(), data, frameSize);
 
@@ -601,7 +603,24 @@
 
     if (mFormat == NULL) {
         mFormat = new MetaData;
-        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+
+        switch (layer) {
+            case 1:
+                mFormat->setCString(
+                        kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I);
+                break;
+            case 2:
+                mFormat->setCString(
+                        kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II);
+                break;
+            case 3:
+                mFormat->setCString(
+                        kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
+                break;
+            default:
+                TRESPASS();
+        }
+
         mFormat->setInt32(kKeySampleRate, samplingRate);
         mFormat->setInt32(kKeyChannelCount, numChannels);
     }
diff --git a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
new file mode 100644
index 0000000..f55be6e
--- /dev/null
+++ b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
@@ -0,0 +1,715 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MPEG2PSExtractor"
+#include <utils/Log.h>
+
+#include "include/MPEG2PSExtractor.h"
+
+#include "AnotherPacketSource.h"
+#include "ESQueue.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct MPEG2PSExtractor::Track : public MediaSource {
+    Track(MPEG2PSExtractor *extractor,
+          unsigned stream_id, unsigned stream_type);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options);
+
+protected:
+    virtual ~Track();
+
+private:
+    friend struct MPEG2PSExtractor;
+
+    MPEG2PSExtractor *mExtractor;
+
+    unsigned mStreamID;
+    unsigned mStreamType;
+    ElementaryStreamQueue *mQueue;
+    sp<AnotherPacketSource> mSource;
+
+    status_t appendPESData(
+            unsigned PTS_DTS_flags,
+            uint64_t PTS, uint64_t DTS,
+            const uint8_t *data, size_t size);
+
+    DISALLOW_EVIL_CONSTRUCTORS(Track);
+};
+
+struct MPEG2PSExtractor::WrappedTrack : public MediaSource {
+    WrappedTrack(const sp<MPEG2PSExtractor> &extractor, const sp<Track> &track);
+
+    virtual status_t start(MetaData *params);
+    virtual status_t stop();
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options);
+
+protected:
+    virtual ~WrappedTrack();
+
+private:
+    sp<MPEG2PSExtractor> mExtractor;
+    sp<MPEG2PSExtractor::Track> mTrack;
+
+    DISALLOW_EVIL_CONSTRUCTORS(WrappedTrack);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::MPEG2PSExtractor(const sp<DataSource> &source)
+    : mDataSource(source),
+      mOffset(0),
+      mFinalResult(OK),
+      mBuffer(new ABuffer(0)),
+      mScanning(true),
+      mProgramStreamMapValid(false) {
+    for (size_t i = 0; i < 500; ++i) {
+        if (feedMore() != OK) {
+            break;
+        }
+    }
+
+    // Remove all tracks that were unable to determine their format.
+    for (size_t i = mTracks.size(); i-- > 0;) {
+        if (mTracks.valueAt(i)->getFormat() == NULL) {
+            mTracks.removeItemsAt(i);
+        }
+    }
+
+    mScanning = false;
+}
+
+MPEG2PSExtractor::~MPEG2PSExtractor() {
+}
+
+size_t MPEG2PSExtractor::countTracks() {
+    return mTracks.size();
+}
+
+sp<MediaSource> MPEG2PSExtractor::getTrack(size_t index) {
+    if (index >= mTracks.size()) {
+        return NULL;
+    }
+
+    return new WrappedTrack(this, mTracks.valueAt(index));
+}
+
+sp<MetaData> MPEG2PSExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+    if (index >= mTracks.size()) {
+        return NULL;
+    }
+
+    return mTracks.valueAt(index)->getFormat();
+}
+
+sp<MetaData> MPEG2PSExtractor::getMetaData() {
+    sp<MetaData> meta = new MetaData;
+    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG2PS);
+
+    return meta;
+}
+
+uint32_t MPEG2PSExtractor::flags() const {
+    return CAN_PAUSE;
+}
+
+status_t MPEG2PSExtractor::feedMore() {
+    Mutex::Autolock autoLock(mLock);
+
+    // How much data we're reading at a time
+    static const size_t kChunkSize = 8192;
+
+    for (;;) {
+        status_t err = dequeueChunk();
+
+        if (err == -EAGAIN && mFinalResult == OK) {
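+            // dequeueChunk() ran out of data: compact the unread bytes to the
+            // front of the buffer, grow it if necessary, then read another chunk.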
+            memmove(mBuffer->base(), mBuffer->data(), mBuffer->size());
+            mBuffer->setRange(0, mBuffer->size());
+
+            if (mBuffer->size() + kChunkSize > mBuffer->capacity()) {
+                size_t newCapacity = mBuffer->capacity() + kChunkSize;
+                sp<ABuffer> newBuffer = new ABuffer(newCapacity);
+                memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
+                newBuffer->setRange(0, mBuffer->size());
+                mBuffer = newBuffer;
+            }
+
+            ssize_t n = mDataSource->readAt(
+                    mOffset, mBuffer->data() + mBuffer->size(), kChunkSize);
+
+            if (n < (ssize_t)kChunkSize) {
+                mFinalResult = (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
+                return mFinalResult;
+            }
+
+            mBuffer->setRange(mBuffer->offset(), mBuffer->size() + n);
+            mOffset += n;
+        } else if (err != OK) {
+            mFinalResult = err;
+            return err;
+        } else {
+            return OK;
+        }
+    }
+}
+
+status_t MPEG2PSExtractor::dequeueChunk() {
+    if (mBuffer->size() < 4) {
+        return -EAGAIN;
+    }
+
+    if (memcmp("\x00\x00\x01", mBuffer->data(), 3)) {
+        return ERROR_MALFORMED;
+    }
+
+    unsigned chunkType = mBuffer->data()[3];
+
+    ssize_t res;
+
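+    // 0xba marks a pack header and 0xbb a system header; everything else is
+    // treated as a PES packet and routed to the matching track.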
+    switch (chunkType) {
+        case 0xba:
+        {
+            res = dequeuePack();
+            break;
+        }
+
+        case 0xbb:
+        {
+            res = dequeueSystemHeader();
+            break;
+        }
+
+        default:
+        {
+            res = dequeuePES();
+            break;
+        }
+    }
+
+    if (res > 0) {
+        if (mBuffer->size() < (size_t)res) {
+            return -EAGAIN;
+        }
+
+        mBuffer->setRange(mBuffer->offset() + res, mBuffer->size() - res);
+        res = OK;
+    }
+
+    return res;
+}
+
+ssize_t MPEG2PSExtractor::dequeuePack() {
+    // 32 + 2 + 3 + 1 + 15 + 1 + 15+ 1 + 9 + 1 + 22 + 1 + 1 | +5
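+    // The fields above add up to 104 bits; with the 5 reserved bits and the
+    // 3-bit pack_stuffing_length that is 112 bits = 14 bytes, hence the
+    // "pack_stuffing_length + 14" returned below.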
+
+    if (mBuffer->size() < 14) {
+        return -EAGAIN;
+    }
+
+    unsigned pack_stuffing_length = mBuffer->data()[13] & 7;
+
+    return pack_stuffing_length + 14;
+}
+
+ssize_t MPEG2PSExtractor::dequeueSystemHeader() {
+    if (mBuffer->size() < 6) {
+        return -EAGAIN;
+    }
+
+    unsigned header_length = U16_AT(mBuffer->data() + 4);
+
+    return header_length + 6;
+}
+
+ssize_t MPEG2PSExtractor::dequeuePES() {
+    if (mBuffer->size() < 6) {
+        return -EAGAIN;
+    }
+
+    unsigned PES_packet_length = U16_AT(mBuffer->data() + 4);
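+    // A PES_packet_length of 0 is only allowed for video elementary streams
+    // carried in transport stream packets, so in a program stream it must be
+    // non-zero.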
+    CHECK_NE(PES_packet_length, 0u);
+
+    size_t n = PES_packet_length + 6;
+
+    if (mBuffer->size() < n) {
+        return -EAGAIN;
+    }
+
+    ABitReader br(mBuffer->data(), n);
+
+    unsigned packet_startcode_prefix = br.getBits(24);
+
+    LOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix);
+
+    if (packet_startcode_prefix != 1) {
+        LOGV("Supposedly payload_unit_start=1 unit does not start "
+             "with startcode.");
+
+        return ERROR_MALFORMED;
+    }
+
+    CHECK_EQ(packet_startcode_prefix, 0x000001u);
+
+    unsigned stream_id = br.getBits(8);
+    LOGV("stream_id = 0x%02x", stream_id);
+
+    /* unsigned PES_packet_length = */br.getBits(16);
+
+    if (stream_id == 0xbc) {
+        // program_stream_map
+
+        if (!mScanning) {
+            return n;
+        }
+
+        mStreamTypeByESID.clear();
+
+        /* unsigned current_next_indicator = */br.getBits(1);
+        /* unsigned reserved = */br.getBits(2);
+        /* unsigned program_stream_map_version = */br.getBits(5);
+        /* unsigned reserved = */br.getBits(7);
+        /* unsigned marker_bit = */br.getBits(1);
+        unsigned program_stream_info_length = br.getBits(16);
+
+        size_t offset = 0;
+        while (offset < program_stream_info_length) {
+            if (offset + 2 > program_stream_info_length) {
+                return ERROR_MALFORMED;
+            }
+
+            unsigned descriptor_tag = br.getBits(8);
+            unsigned descriptor_length = br.getBits(8);
+
+            LOGI("found descriptor tag 0x%02x of length %u",
+                 descriptor_tag, descriptor_length);
+
+            if (offset + 2 + descriptor_length > program_stream_info_length) {
+                return ERROR_MALFORMED;
+            }
+
+            br.skipBits(8 * descriptor_length);
+
+            offset += 2 + descriptor_length;
+        }
+
+        unsigned elementary_stream_map_length = br.getBits(16);
+
+        offset = 0;
+        while (offset < elementary_stream_map_length) {
+            if (offset + 4 > elementary_stream_map_length) {
+                return ERROR_MALFORMED;
+            }
+
+            unsigned stream_type = br.getBits(8);
+            unsigned elementary_stream_id = br.getBits(8);
+
+            LOGI("elementary stream id 0x%02x has stream type 0x%02x",
+                 elementary_stream_id, stream_type);
+
+            mStreamTypeByESID.add(elementary_stream_id, stream_type);
+
+            unsigned elementary_stream_info_length = br.getBits(16);
+
+            if (offset + 4 + elementary_stream_info_length
+                    > elementary_stream_map_length) {
+                return ERROR_MALFORMED;
+            }
+
+            offset += 4 + elementary_stream_info_length;
+        }
+
+        /* unsigned CRC32 = */br.getBits(32);
+
+        mProgramStreamMapValid = true;
+    } else if (stream_id != 0xbe  // padding_stream
+            && stream_id != 0xbf  // private_stream_2
+            && stream_id != 0xf0  // ECM
+            && stream_id != 0xf1  // EMM
+            && stream_id != 0xff  // program_stream_directory
+            && stream_id != 0xf2  // DSMCC
+            && stream_id != 0xf8) {  // H.222.1 type E
+        CHECK_EQ(br.getBits(2), 2u);
+
+        /* unsigned PES_scrambling_control = */br.getBits(2);
+        /* unsigned PES_priority = */br.getBits(1);
+        /* unsigned data_alignment_indicator = */br.getBits(1);
+        /* unsigned copyright = */br.getBits(1);
+        /* unsigned original_or_copy = */br.getBits(1);
+
+        unsigned PTS_DTS_flags = br.getBits(2);
+        LOGV("PTS_DTS_flags = %u", PTS_DTS_flags);
+
+        unsigned ESCR_flag = br.getBits(1);
+        LOGV("ESCR_flag = %u", ESCR_flag);
+
+        unsigned ES_rate_flag = br.getBits(1);
+        LOGV("ES_rate_flag = %u", ES_rate_flag);
+
+        unsigned DSM_trick_mode_flag = br.getBits(1);
+        LOGV("DSM_trick_mode_flag = %u", DSM_trick_mode_flag);
+
+        unsigned additional_copy_info_flag = br.getBits(1);
+        LOGV("additional_copy_info_flag = %u", additional_copy_info_flag);
+
+        /* unsigned PES_CRC_flag = */br.getBits(1);
+        /* PES_extension_flag = */br.getBits(1);
+
+        unsigned PES_header_data_length = br.getBits(8);
+        LOGV("PES_header_data_length = %u", PES_header_data_length);
+
+        unsigned optional_bytes_remaining = PES_header_data_length;
+
+        uint64_t PTS = 0, DTS = 0;
+
+        if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
+            CHECK_GE(optional_bytes_remaining, 5u);
+
+            CHECK_EQ(br.getBits(4), PTS_DTS_flags);
+
+            PTS = ((uint64_t)br.getBits(3)) << 30;
+            CHECK_EQ(br.getBits(1), 1u);
+            PTS |= ((uint64_t)br.getBits(15)) << 15;
+            CHECK_EQ(br.getBits(1), 1u);
+            PTS |= br.getBits(15);
+            CHECK_EQ(br.getBits(1), 1u);
+
+            LOGV("PTS = %llu", PTS);
+            // LOGI("PTS = %.2f secs", PTS / 90000.0f);
+
+            optional_bytes_remaining -= 5;
+
+            if (PTS_DTS_flags == 3) {
+                CHECK_GE(optional_bytes_remaining, 5u);
+
+                CHECK_EQ(br.getBits(4), 1u);
+
+                DTS = ((uint64_t)br.getBits(3)) << 30;
+                CHECK_EQ(br.getBits(1), 1u);
+                DTS |= ((uint64_t)br.getBits(15)) << 15;
+                CHECK_EQ(br.getBits(1), 1u);
+                DTS |= br.getBits(15);
+                CHECK_EQ(br.getBits(1), 1u);
+
+                LOGV("DTS = %llu", DTS);
+
+                optional_bytes_remaining -= 5;
+            }
+        }
+
+        if (ESCR_flag) {
+            CHECK_GE(optional_bytes_remaining, 6u);
+
+            br.getBits(2);
+
+            uint64_t ESCR = ((uint64_t)br.getBits(3)) << 30;
+            CHECK_EQ(br.getBits(1), 1u);
+            ESCR |= ((uint64_t)br.getBits(15)) << 15;
+            CHECK_EQ(br.getBits(1), 1u);
+            ESCR |= br.getBits(15);
+            CHECK_EQ(br.getBits(1), 1u);
+
+            LOGV("ESCR = %llu", ESCR);
+            /* unsigned ESCR_extension = */br.getBits(9);
+
+            CHECK_EQ(br.getBits(1), 1u);
+
+            optional_bytes_remaining -= 6;
+        }
+
+        if (ES_rate_flag) {
+            CHECK_GE(optional_bytes_remaining, 3u);
+
+            CHECK_EQ(br.getBits(1), 1u);
+            /* unsigned ES_rate = */br.getBits(22);
+            CHECK_EQ(br.getBits(1), 1u);
+
+            optional_bytes_remaining -= 3;
+        }
+
+        br.skipBits(optional_bytes_remaining * 8);
+
+        // ES data follows.
+
+        CHECK_GE(PES_packet_length, PES_header_data_length + 3);
+
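+        // PES_packet_length counts every byte after its own field; the fixed
+        // part of the optional PES header ('10', the flag bits and
+        // PES_header_data_length itself) takes 3 bytes, the variable part
+        // another PES_header_data_length bytes, the rest is payload.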
+        unsigned dataLength =
+            PES_packet_length - 3 - PES_header_data_length;
+
+        if (br.numBitsLeft() < dataLength * 8) {
+            LOGE("PES packet does not carry enough data to contain "
+                 "payload. (numBitsLeft = %d, required = %d)",
+                 br.numBitsLeft(), dataLength * 8);
+
+            return ERROR_MALFORMED;
+        }
+
+        CHECK_GE(br.numBitsLeft(), dataLength * 8);
+
+        ssize_t index = mTracks.indexOfKey(stream_id);
+        if (index < 0 && mScanning) {
+            unsigned streamType;
+
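+            // Masking with ~0x1f matches stream_ids 0xc0-0xdf (the MPEG audio
+            // streams); masking with ~0x0f matches 0xe0-0xef (the video streams).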
+            ssize_t streamTypeIndex;
+            if (mProgramStreamMapValid
+                    && (streamTypeIndex =
+                            mStreamTypeByESID.indexOfKey(stream_id)) >= 0) {
+                streamType = mStreamTypeByESID.valueAt(streamTypeIndex);
+            } else if ((stream_id & ~0x1f) == 0xc0) {
+                // ISO/IEC 13818-3 or ISO/IEC 11172-3 or ISO/IEC 13818-7
+                // or ISO/IEC 14496-3 audio
+                streamType = ATSParser::STREAMTYPE_MPEG2_AUDIO;
+            } else if ((stream_id & ~0x0f) == 0xe0) {
+                // ISO/IEC 13818-2 or ISO/IEC 11172-2 or ISO/IEC 14496-2 video
+                streamType = ATSParser::STREAMTYPE_MPEG2_VIDEO;
+            } else {
+                streamType = ATSParser::STREAMTYPE_RESERVED;
+            }
+
+            index = mTracks.add(
+                    stream_id, new Track(this, stream_id, streamType));
+        }
+
+        status_t err = OK;
+
+        if (index >= 0) {
+            err =
+                mTracks.editValueAt(index)->appendPESData(
+                    PTS_DTS_flags, PTS, DTS, br.data(), dataLength);
+        }
+
+        br.skipBits(dataLength * 8);
+
+        if (err != OK) {
+            return err;
+        }
+    } else if (stream_id == 0xbe) {  // padding_stream
+        CHECK_NE(PES_packet_length, 0u);
+        br.skipBits(PES_packet_length * 8);
+    } else {
+        CHECK_NE(PES_packet_length, 0u);
+        br.skipBits(PES_packet_length * 8);
+    }
+
+    return n;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::Track::Track(
+        MPEG2PSExtractor *extractor, unsigned stream_id, unsigned stream_type)
+    : mExtractor(extractor),
+      mStreamID(stream_id),
+      mStreamType(stream_type),
+      mQueue(NULL) {
+    bool supported = true;
+    ElementaryStreamQueue::Mode mode;
+
+    switch (mStreamType) {
+        case ATSParser::STREAMTYPE_H264:
+            mode = ElementaryStreamQueue::H264;
+            break;
+        case ATSParser::STREAMTYPE_MPEG2_AUDIO_ATDS:
+            mode = ElementaryStreamQueue::AAC;
+            break;
+        case ATSParser::STREAMTYPE_MPEG1_AUDIO:
+        case ATSParser::STREAMTYPE_MPEG2_AUDIO:
+            mode = ElementaryStreamQueue::MPEG_AUDIO;
+            break;
+
+        case ATSParser::STREAMTYPE_MPEG1_VIDEO:
+        case ATSParser::STREAMTYPE_MPEG2_VIDEO:
+            mode = ElementaryStreamQueue::MPEG_VIDEO;
+            break;
+
+        case ATSParser::STREAMTYPE_MPEG4_VIDEO:
+            mode = ElementaryStreamQueue::MPEG4_VIDEO;
+            break;
+
+        default:
+            supported = false;
+            break;
+    }
+
+    if (supported) {
+        mQueue = new ElementaryStreamQueue(mode);
+    } else {
+        LOGI("unsupported stream ID 0x%02x", stream_id);
+    }
+}
+
+MPEG2PSExtractor::Track::~Track() {
+    delete mQueue;
+    mQueue = NULL;
+}
+
+status_t MPEG2PSExtractor::Track::start(MetaData *params) {
+    if (mSource == NULL) {
+        return NO_INIT;
+    }
+
+    return mSource->start(params);
+}
+
+status_t MPEG2PSExtractor::Track::stop() {
+    if (mSource == NULL) {
+        return NO_INIT;
+    }
+
+    return mSource->stop();
+}
+
+sp<MetaData> MPEG2PSExtractor::Track::getFormat() {
+    if (mSource == NULL) {
+        return NULL;
+    }
+
+    return mSource->getFormat();
+}
+
+status_t MPEG2PSExtractor::Track::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    if (mSource == NULL) {
+        return NO_INIT;
+    }
+
+    status_t finalResult;
+    while (!mSource->hasBufferAvailable(&finalResult)) {
+        if (finalResult != OK) {
+            return ERROR_END_OF_STREAM;
+        }
+
+        status_t err = mExtractor->feedMore();
+
+        if (err != OK) {
+            mSource->signalEOS(err);
+        }
+    }
+
+    return mSource->read(buffer, options);
+}
+
+status_t MPEG2PSExtractor::Track::appendPESData(
+        unsigned PTS_DTS_flags,
+        uint64_t PTS, uint64_t DTS,
+        const uint8_t *data, size_t size) {
+    if (mQueue == NULL) {
+        return OK;
+    }
+
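+    // PTS is counted in 90 kHz clock ticks, so microseconds = PTS * 1,000,000 / 90,000,
+    // which reduces to (PTS * 100) / 9.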
+    int64_t timeUs;
+    if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
+        timeUs = (PTS * 100) / 9;
+    } else {
+        timeUs = 0;
+    }
+
+    status_t err = mQueue->appendData(data, size, timeUs);
+
+    if (err != OK) {
+        return err;
+    }
+
+    sp<ABuffer> accessUnit;
+    while ((accessUnit = mQueue->dequeueAccessUnit()) != NULL) {
+        if (mSource == NULL) {
+            sp<MetaData> meta = mQueue->getFormat();
+
+            if (meta != NULL) {
+                LOGV("Stream ID 0x%02x now has data.", mStreamID);
+
+                mSource = new AnotherPacketSource(meta);
+                mSource->queueAccessUnit(accessUnit);
+            }
+        } else if (mQueue->getFormat() != NULL) {
+            mSource->queueAccessUnit(accessUnit);
+        }
+    }
+
+    return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+MPEG2PSExtractor::WrappedTrack::WrappedTrack(
+        const sp<MPEG2PSExtractor> &extractor, const sp<Track> &track)
+    : mExtractor(extractor),
+      mTrack(track) {
+}
+
+MPEG2PSExtractor::WrappedTrack::~WrappedTrack() {
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::start(MetaData *params) {
+    return mTrack->start(params);
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::stop() {
+    return mTrack->stop();
+}
+
+sp<MetaData> MPEG2PSExtractor::WrappedTrack::getFormat() {
+    return mTrack->getFormat();
+}
+
+status_t MPEG2PSExtractor::WrappedTrack::read(
+        MediaBuffer **buffer, const ReadOptions *options) {
+    return mTrack->read(buffer, options);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+bool SniffMPEG2PS(
+        const sp<DataSource> &source, String8 *mimeType, float *confidence,
+        sp<AMessage> *) {
+    uint8_t header[5];
+    if (source->readAt(0, header, sizeof(header)) < (ssize_t)sizeof(header)) {
+        return false;
+    }
+
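+    // An MPEG-2 program stream starts with the pack_start_code 0x000001BA
+    // followed by '01' in the two top bits of the next byte; MPEG-1 system
+    // streams carry '0010' there instead, so (header[4] >> 6) != 1 rejects them.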
+    if (memcmp("\x00\x00\x01\xba", header, 4) || (header[4] >> 6) != 1) {
+        return false;
+    }
+
+    *confidence = 0.25f;  // Slightly larger than .mp3 extractor's confidence
+
+    mimeType->setTo(MEDIA_MIMETYPE_CONTAINER_MPEG2PS);
+
+    return true;
+}
+
+}  // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 4ecb92f..3f4cdb5 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -34,8 +34,8 @@
 #include <media/stagefright/foundation/AString.h>
 #include <media/stagefright/foundation/base64.h>
 #include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaData.h>
 #include <utils/Vector.h>
 
@@ -402,43 +402,15 @@
     return csd;
 }
 
-static bool GetClockRate(const AString &desc, uint32_t *clockRate) {
-    ssize_t slashPos = desc.find("/");
-    if (slashPos < 0) {
-        return false;
-    }
-
-    const char *s = desc.c_str() + slashPos + 1;
-
-    char *end;
-    unsigned long x = strtoul(s, &end, 10);
-
-    if (end == s || (*end != '\0' && *end != '/')) {
-        return false;
-    }
-
-    *clockRate = x;
-
-    return true;
-}
-
 APacketSource::APacketSource(
         const sp<ASessionDescription> &sessionDesc, size_t index)
     : mInitCheck(NO_INIT),
-      mFormat(new MetaData),
-      mEOSResult(OK),
-      mIsAVC(false),
-      mScanForIDR(true),
-      mRTPTimeBase(0),
-      mNormalPlayTimeBaseUs(0),
-      mLastNormalPlayTimeUs(0) {
+      mFormat(new MetaData) {
     unsigned long PT;
     AString desc;
     AString params;
     sessionDesc->getFormatType(index, &PT, &desc, &params);
 
-    CHECK(GetClockRate(desc, &mClockRate));
-
     int64_t durationUs;
     if (sessionDesc->getDurationUs(&durationUs)) {
         mFormat->setInt64(kKeyDuration, durationUs);
@@ -448,8 +420,6 @@
 
     mInitCheck = OK;
     if (!strncmp(desc.c_str(), "H264/", 5)) {
-        mIsAVC = true;
-
         mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
 
         int32_t width, height;
@@ -602,137 +572,8 @@
     return mInitCheck;
 }
 
-status_t APacketSource::start(MetaData *params) {
-    return OK;
-}
-
-status_t APacketSource::stop() {
-    return OK;
-}
-
 sp<MetaData> APacketSource::getFormat() {
     return mFormat;
 }
 
-status_t APacketSource::read(
-        MediaBuffer **out, const ReadOptions *) {
-    *out = NULL;
-
-    Mutex::Autolock autoLock(mLock);
-    while (mEOSResult == OK && mBuffers.empty()) {
-        mCondition.wait(mLock);
-    }
-
-    if (!mBuffers.empty()) {
-        const sp<ABuffer> buffer = *mBuffers.begin();
-
-        updateNormalPlayTime_l(buffer);
-
-        int64_t timeUs;
-        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
-
-        MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
-        mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
-
-        *out = mediaBuffer;
-
-        mBuffers.erase(mBuffers.begin());
-        return OK;
-    }
-
-    return mEOSResult;
-}
-
-void APacketSource::updateNormalPlayTime_l(const sp<ABuffer> &buffer) {
-    uint32_t rtpTime;
-    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
-
-    mLastNormalPlayTimeUs =
-        (((double)rtpTime - (double)mRTPTimeBase) / mClockRate)
-            * 1000000ll
-            + mNormalPlayTimeBaseUs;
-}
-
-void APacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
-    int32_t damaged;
-    if (buffer->meta()->findInt32("damaged", &damaged) && damaged) {
-        LOGV("discarding damaged AU");
-        return;
-    }
-
-    if (mScanForIDR && mIsAVC) {
-        // This pretty piece of code ensures that the first access unit
-        // fed to the decoder after stream-start or seek is guaranteed to
-        // be an IDR frame. This is to workaround limitations of a certain
-        // hardware h.264 decoder that requires this to be the case.
-
-        if (!IsIDR(buffer)) {
-            LOGV("skipping AU while scanning for next IDR frame.");
-            return;
-        }
-
-        mScanForIDR = false;
-    }
-
-    Mutex::Autolock autoLock(mLock);
-    mBuffers.push_back(buffer);
-    mCondition.signal();
-}
-
-void APacketSource::signalEOS(status_t result) {
-    CHECK(result != OK);
-
-    Mutex::Autolock autoLock(mLock);
-    mEOSResult = result;
-    mCondition.signal();
-}
-
-void APacketSource::flushQueue() {
-    Mutex::Autolock autoLock(mLock);
-    mBuffers.clear();
-
-    mScanForIDR = true;
-}
-
-int64_t APacketSource::getNormalPlayTimeUs() {
-    Mutex::Autolock autoLock(mLock);
-    return mLastNormalPlayTimeUs;
-}
-
-void APacketSource::setNormalPlayTimeMapping(
-        uint32_t rtpTime, int64_t normalPlayTimeUs) {
-    Mutex::Autolock autoLock(mLock);
-
-    mRTPTimeBase = rtpTime;
-    mNormalPlayTimeBaseUs = normalPlayTimeUs;
-}
-
-int64_t APacketSource::getQueueDurationUs(bool *eos) {
-    Mutex::Autolock autoLock(mLock);
-
-    *eos = (mEOSResult != OK);
-
-    if (mBuffers.size() < 2) {
-        return 0;
-    }
-
-    const sp<ABuffer> first = *mBuffers.begin();
-    const sp<ABuffer> last = *--mBuffers.end();
-
-    int64_t firstTimeUs;
-    CHECK(first->meta()->findInt64("timeUs", &firstTimeUs));
-
-    int64_t lastTimeUs;
-    CHECK(last->meta()->findInt64("timeUs", &lastTimeUs));
-
-    if (lastTimeUs < firstTimeUs) {
-        LOGE("Huh? Time moving backwards? %lld > %lld",
-             firstTimeUs, lastTimeUs);
-
-        return 0;
-    }
-
-    return lastTimeUs - firstTimeUs;
-}
-
 }  // namespace android
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/APacketSource.h
index 7a77fc6..530e537 100644
--- a/media/libstagefright/rtsp/APacketSource.h
+++ b/media/libstagefright/rtsp/APacketSource.h
@@ -19,63 +19,27 @@
 #define A_PACKET_SOURCE_H_
 
 #include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
-#include <utils/threads.h>
-#include <utils/List.h>
+#include <media/stagefright/MetaData.h>
+#include <utils/RefBase.h>
 
 namespace android {
 
-struct ABuffer;
 struct ASessionDescription;
 
-struct APacketSource : public MediaSource {
+struct APacketSource : public RefBase {
     APacketSource(const sp<ASessionDescription> &sessionDesc, size_t index);
 
     status_t initCheck() const;
 
-    virtual status_t start(MetaData *params = NULL);
-    virtual status_t stop();
     virtual sp<MetaData> getFormat();
 
-    virtual status_t read(
-            MediaBuffer **buffer, const ReadOptions *options = NULL);
-
-    void queueAccessUnit(const sp<ABuffer> &buffer);
-    void signalEOS(status_t result);
-
-    void flushQueue();
-
-    int64_t getNormalPlayTimeUs();
-
-    void setNormalPlayTimeMapping(
-            uint32_t rtpTime, int64_t normalPlayTimeUs);
-
-    int64_t getQueueDurationUs(bool *eos);
-
 protected:
     virtual ~APacketSource();
 
 private:
     status_t mInitCheck;
 
-    Mutex mLock;
-    Condition mCondition;
-
     sp<MetaData> mFormat;
-    List<sp<ABuffer> > mBuffers;
-    status_t mEOSResult;
-
-    bool mIsAVC;
-    bool mScanForIDR;
-
-    uint32_t mClockRate;
-
-    uint32_t mRTPTimeBase;
-    int64_t mNormalPlayTimeBaseUs;
-
-    int64_t mLastNormalPlayTimeUs;
-
-    void updateNormalPlayTime_l(const sp<ABuffer> &buffer);
 
     DISALLOW_EVIL_CONSTRUCTORS(APacketSource);
 };
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 47de4e0..cd374e2 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -220,7 +220,7 @@
     }
 
     if (it == mStreams.end()) {
-        TRESPASS();
+        return;
     }
 
     mStreams.erase(it);
@@ -274,41 +274,52 @@
     }
 
     int res = select(maxSocket + 1, &rs, NULL, NULL, &tv);
-    CHECK_GE(res, 0);
 
     if (res > 0) {
-        for (List<StreamInfo>::iterator it = mStreams.begin();
-             it != mStreams.end(); ++it) {
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
             if ((*it).mIsInjected) {
+                ++it;
                 continue;
             }
 
+            status_t err = OK;
             if (FD_ISSET(it->mRTPSocket, &rs)) {
-                receive(&*it, true);
+                err = receive(&*it, true);
             }
-            if (FD_ISSET(it->mRTCPSocket, &rs)) {
-                receive(&*it, false);
+            if (err == OK && FD_ISSET(it->mRTCPSocket, &rs)) {
+                err = receive(&*it, false);
             }
+
+            if (err == -ECONNRESET) {
+                // socket failure, this stream is dead, Jim.
+
+                LOGW("failed to receive RTP/RTCP datagram.");
+                it = mStreams.erase(it);
+                continue;
+            }
+
+            ++it;
         }
     }
 
-    postPollEvent();
-
     int64_t nowUs = ALooper::GetNowUs();
     if (mLastReceiverReportTimeUs <= 0
             || mLastReceiverReportTimeUs + 5000000ll <= nowUs) {
         sp<ABuffer> buffer = new ABuffer(kMaxUDPSize);
-        for (List<StreamInfo>::iterator it = mStreams.begin();
-             it != mStreams.end(); ++it) {
+        List<StreamInfo>::iterator it = mStreams.begin();
+        while (it != mStreams.end()) {
             StreamInfo *s = &*it;
 
             if (s->mIsInjected) {
+                ++it;
                 continue;
             }
 
             if (s->mNumRTCPPacketsReceived == 0) {
                 // We have never received any RTCP packets on this stream,
                 // we don't even know where to send a report.
+                ++it;
                 continue;
             }
 
@@ -327,16 +338,34 @@
             if (buffer->size() > 0) {
                 LOGV("Sending RR...");
 
-                ssize_t n = sendto(
+                ssize_t n;
+                do {
+                    n = sendto(
                         s->mRTCPSocket, buffer->data(), buffer->size(), 0,
                         (const struct sockaddr *)&s->mRemoteRTCPAddr,
                         sizeof(s->mRemoteRTCPAddr));
+                } while (n < 0 && errno == EINTR);
+
+                if (n <= 0) {
+                    LOGW("failed to send RTCP receiver report (%s).",
+                         n == 0 ? "connection gone" : strerror(errno));
+
+                    it = mStreams.erase(it);
+                    continue;
+                }
+
                 CHECK_EQ(n, (ssize_t)buffer->size());
 
                 mLastReceiverReportTimeUs = nowUs;
             }
+
+            ++it;
         }
     }
+
+    if (!mStreams.empty()) {
+        postPollEvent();
+    }
 }
 
 status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
@@ -350,16 +379,19 @@
         (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
             ? sizeof(s->mRemoteRTCPAddr) : 0;
 
-    ssize_t nbytes = recvfrom(
+    ssize_t nbytes;
+    do {
+        nbytes = recvfrom(
             receiveRTP ? s->mRTPSocket : s->mRTCPSocket,
             buffer->data(),
             buffer->capacity(),
             0,
             remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
             remoteAddrLen > 0 ? &remoteAddrLen : NULL);
+    } while (nbytes < 0 && errno == EINTR);
 
-    if (nbytes < 0) {
-        return -1;
+    if (nbytes <= 0) {
+        return -ECONNRESET;
     }
 
     buffer->setRange(0, nbytes);
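
Note on the receive() change above: it follows the common POSIX pattern of retrying the call while it fails with EINTR and collapsing every other failure, including a zero return, into -ECONNRESET so the caller can prune the dead stream. A minimal standalone sketch of that pattern (plain POSIX, not the actual ARTPConnection members):

    #include <errno.h>
    #include <sys/socket.h>
    #include <sys/types.h>

    // Sketch only: receive one datagram, retrying on EINTR.
    // Any other failure (or a zero return) is reported as -ECONNRESET,
    // mirroring how ARTPConnection::receive() now flags dead sockets.
    static ssize_t receiveDatagram(int fd, void *buf, size_t capacity) {
        ssize_t n;
        do {
            n = recvfrom(fd, buf, capacity, 0, NULL, NULL);
        } while (n < 0 && errno == EINTR);

        return n <= 0 ? -ECONNRESET : n;
    }
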
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index bd0e491..4f0363b 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -187,10 +187,13 @@
     return true;
 }
 
-static void MakeSocketBlocking(int s, bool blocking) {
+static status_t MakeSocketBlocking(int s, bool blocking) {
     // Make socket non-blocking.
     int flags = fcntl(s, F_GETFL, 0);
-    CHECK_NE(flags, -1);
+
+    if (flags == -1) {
+        return UNKNOWN_ERROR;
+    }
 
     if (blocking) {
         flags &= ~O_NONBLOCK;
@@ -198,7 +201,9 @@
         flags |= O_NONBLOCK;
     }
 
-    CHECK_NE(fcntl(s, F_SETFL, flags), -1);
+    flags = fcntl(s, F_SETFL, flags);
+
+    return flags == -1 ? UNKNOWN_ERROR : OK;
 }
 
 void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
@@ -302,27 +307,32 @@
     reply->post();
 }
 
+void ARTSPConnection::performDisconnect() {
+    if (mUIDValid) {
+        HTTPBase::UnRegisterSocketUserTag(mSocket);
+    }
+    close(mSocket);
+    mSocket = -1;
+
+    flushPendingRequests();
+
+    mUser.clear();
+    mPass.clear();
+    mAuthType = NONE;
+    mNonce.clear();
+
+    mState = DISCONNECTED;
+}
+
 void ARTSPConnection::onDisconnect(const sp<AMessage> &msg) {
     if (mState == CONNECTED || mState == CONNECTING) {
-        if (mUIDValid) {
-            HTTPBase::UnRegisterSocketUserTag(mSocket);
-        }
-        close(mSocket);
-        mSocket = -1;
-
-        flushPendingRequests();
+        performDisconnect();
     }
 
     sp<AMessage> reply;
     CHECK(msg->findMessage("reply", &reply));
 
     reply->setInt32("result", OK);
-    mState = DISCONNECTED;
-
-    mUser.clear();
-    mPass.clear();
-    mAuthType = NONE;
-    mNonce.clear();
 
     reply->post();
 }
@@ -427,21 +437,25 @@
             send(mSocket, request.c_str() + numBytesSent,
                  request.size() - numBytesSent, 0);
 
-        if (n == 0) {
-            // Server closed the connection.
-            LOGE("Server unexpectedly closed the connection.");
+        if (n < 0 && errno == EINTR) {
+            continue;
+        }
 
-            reply->setInt32("result", ERROR_IO);
-            reply->post();
-            return;
-        } else if (n < 0) {
-            if (errno == EINTR) {
-                continue;
+        if (n <= 0) {
+            performDisconnect();
+
+            if (n == 0) {
+                // Server closed the connection.
+                LOGE("Server unexpectedly closed the connection.");
+
+                reply->setInt32("result", ERROR_IO);
+                reply->post();
+            } else {
+                LOGE("Error sending rtsp request. (%s)", strerror(errno));
+                reply->setInt32("result", -errno);
+                reply->post();
             }
 
-            LOGE("Error sending rtsp request.");
-            reply->setInt32("result", -errno);
-            reply->post();
             return;
         }
 
@@ -512,17 +526,22 @@
     size_t offset = 0;
     while (offset < size) {
         ssize_t n = recv(mSocket, (uint8_t *)data + offset, size - offset, 0);
-        if (n == 0) {
-            // Server closed the connection.
-            LOGE("Server unexpectedly closed the connection.");
-            return ERROR_IO;
-        } else if (n < 0) {
-            if (errno == EINTR) {
-                continue;
-            }
 
-            LOGE("Error reading rtsp response.");
-            return -errno;
+        if (n < 0 && errno == EINTR) {
+            continue;
+        }
+
+        if (n <= 0) {
+            performDisconnect();
+
+            if (n == 0) {
+                // Server closed the connection.
+                LOGE("Server unexpectedly closed the connection.");
+                return ERROR_IO;
+            } else {
+                LOGE("Error reading rtsp response. (%s)", strerror(errno));
+                return -errno;
+            }
         }
 
         offset += (size_t)n;
@@ -681,24 +700,8 @@
     if (contentLength > 0) {
         response->mContent = new ABuffer(contentLength);
 
-        size_t numBytesRead = 0;
-        while (numBytesRead < contentLength) {
-            ssize_t n = recv(
-                    mSocket, response->mContent->data() + numBytesRead,
-                    contentLength - numBytesRead, 0);
-
-            if (n == 0) {
-                // Server closed the connection.
-                TRESPASS();
-            } else if (n < 0) {
-                if (errno == EINTR) {
-                    continue;
-                }
-
-                TRESPASS();
-            }
-
-            numBytesRead += (size_t)n;
+        if (receive(response->mContent->data(), contentLength) != OK) {
+            return false;
         }
     }
 
@@ -765,17 +768,20 @@
             send(mSocket, response.c_str() + numBytesSent,
                  response.size() - numBytesSent, 0);
 
-        if (n == 0) {
-            // Server closed the connection.
-            LOGE("Server unexpectedly closed the connection.");
+        if (n < 0 && errno == EINTR) {
+            continue;
+        }
 
-            return false;
-        } else if (n < 0) {
-            if (errno == EINTR) {
-                continue;
+        if (n <= 0) {
+            if (n == 0) {
+                // Server closed the connection.
+                LOGE("Server unexpectedly closed the connection.");
+            } else {
+                LOGE("Error sending rtsp response (%s).", strerror(errno));
             }
 
-            LOGE("Error sending rtsp response.");
+            performDisconnect();
+
             return false;
         }
 
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 5cb84fd..68f2d59 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -91,6 +91,8 @@
 
     AString mUserAgent;
 
+    void performDisconnect();
+
     void onConnect(const sp<AMessage> &msg);
     void onDisconnect(const sp<AMessage> &msg);
     void onCompleteConnection(const sp<AMessage> &msg);
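
The request and response paths in ARTSPConnection.cpp above now retry interrupted writes and funnel hard socket failures through the performDisconnect() helper declared here. The underlying loop is the usual write-until-done idiom; a simplified sketch (plain POSIX over a blocking TCP socket, with a hypothetical sendAll() helper rather than the actual class members):

    #include <errno.h>
    #include <sys/socket.h>
    #include <sys/types.h>

    // Sketch only: send an entire buffer over a connected TCP socket.
    // Returns false if the peer closed the connection or a hard error
    // occurred; the real code then calls performDisconnect().
    static bool sendAll(int fd, const char *data, size_t size) {
        size_t sent = 0;
        while (sent < size) {
            ssize_t n = send(fd, data + sent, size - sent, 0);

            if (n < 0 && errno == EINTR) {
                continue;           // interrupted by a signal, retry
            }
            if (n <= 0) {
                return false;       // peer gone or unrecoverable error
            }
            sent += (size_t)n;
        }
        return true;
    }
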
diff --git a/media/libstagefright/rtsp/ARTSPController.cpp b/media/libstagefright/rtsp/ARTSPController.cpp
deleted file mode 100644
index 2ebae7e..0000000
--- a/media/libstagefright/rtsp/ARTSPController.cpp
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "ARTSPController.h"
-
-#include "MyHandler.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-ARTSPController::ARTSPController(const sp<ALooper> &looper)
-    : mState(DISCONNECTED),
-      mLooper(looper),
-      mUIDValid(false),
-      mSeekDoneCb(NULL),
-      mSeekDoneCookie(NULL),
-      mLastSeekCompletedTimeUs(-1) {
-    mReflector = new AHandlerReflector<ARTSPController>(this);
-    looper->registerHandler(mReflector);
-}
-
-ARTSPController::~ARTSPController() {
-    CHECK_EQ((int)mState, (int)DISCONNECTED);
-    mLooper->unregisterHandler(mReflector->id());
-}
-
-void ARTSPController::setUID(uid_t uid) {
-    mUIDValid = true;
-    mUID = uid;
-}
-
-status_t ARTSPController::connect(const char *url) {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mState != DISCONNECTED) {
-        return ERROR_ALREADY_CONNECTED;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatConnectDone, mReflector->id());
-
-    mHandler = new MyHandler(url, mLooper, mUIDValid, mUID);
-
-    mState = CONNECTING;
-
-    mHandler->connect(msg);
-
-    while (mState == CONNECTING) {
-        mCondition.wait(mLock);
-    }
-
-    if (mState != CONNECTED) {
-        mHandler.clear();
-    }
-
-    return mConnectionResult;
-}
-
-void ARTSPController::disconnect() {
-    Mutex::Autolock autoLock(mLock);
-
-    if (mState == CONNECTING) {
-        mState = DISCONNECTED;
-        mConnectionResult = ERROR_IO;
-        mCondition.broadcast();
-
-        mHandler.clear();
-        return;
-    } else if (mState != CONNECTED) {
-        return;
-    }
-
-    sp<AMessage> msg = new AMessage(kWhatDisconnectDone, mReflector->id());
-    mHandler->disconnect(msg);
-
-    while (mState == CONNECTED) {
-        mCondition.wait(mLock);
-    }
-
-    mHandler.clear();
-}
-
-void ARTSPController::seekAsync(
-        int64_t timeUs,
-        void (*seekDoneCb)(void *), void *cookie) {
-    Mutex::Autolock autoLock(mLock);
-
-    CHECK(seekDoneCb != NULL);
-    CHECK(mSeekDoneCb == NULL);
-
-    // Ignore seek requests that are too soon after the previous one has
-    // completed, we don't want to swamp the server.
-
-    bool tooEarly =
-        mLastSeekCompletedTimeUs >= 0
-            && ALooper::GetNowUs() < mLastSeekCompletedTimeUs + 500000ll;
-
-    if (mState != CONNECTED || tooEarly) {
-        (*seekDoneCb)(cookie);
-        return;
-    }
-
-    mSeekDoneCb = seekDoneCb;
-    mSeekDoneCookie = cookie;
-
-    sp<AMessage> msg = new AMessage(kWhatSeekDone, mReflector->id());
-    mHandler->seek(timeUs, msg);
-}
-
-size_t ARTSPController::countTracks() {
-    if (mHandler == NULL) {
-        return 0;
-    }
-
-    return mHandler->countTracks();
-}
-
-sp<MediaSource> ARTSPController::getTrack(size_t index) {
-    CHECK(mHandler != NULL);
-
-    return mHandler->getPacketSource(index);
-}
-
-sp<MetaData> ARTSPController::getTrackMetaData(
-        size_t index, uint32_t flags) {
-    CHECK(mHandler != NULL);
-
-    return mHandler->getPacketSource(index)->getFormat();
-}
-
-void ARTSPController::onMessageReceived(const sp<AMessage> &msg) {
-    switch (msg->what()) {
-        case kWhatConnectDone:
-        {
-            Mutex::Autolock autoLock(mLock);
-
-            CHECK(msg->findInt32("result", &mConnectionResult));
-            mState = (mConnectionResult == OK) ? CONNECTED : DISCONNECTED;
-
-            mCondition.signal();
-            break;
-        }
-
-        case kWhatDisconnectDone:
-        {
-            Mutex::Autolock autoLock(mLock);
-            mState = DISCONNECTED;
-            mCondition.signal();
-            break;
-        }
-
-        case kWhatSeekDone:
-        {
-            LOGI("seek done");
-
-            mLastSeekCompletedTimeUs = ALooper::GetNowUs();
-
-            void (*seekDoneCb)(void *) = mSeekDoneCb;
-            mSeekDoneCb = NULL;
-
-            (*seekDoneCb)(mSeekDoneCookie);
-            break;
-        }
-
-        default:
-            TRESPASS();
-            break;
-    }
-}
-
-int64_t ARTSPController::getNormalPlayTimeUs() {
-    CHECK(mHandler != NULL);
-    return mHandler->getNormalPlayTimeUs();
-}
-
-int64_t ARTSPController::getQueueDurationUs(bool *eos) {
-    *eos = true;
-
-    int64_t minQueuedDurationUs = 0;
-    for (size_t i = 0; i < mHandler->countTracks(); ++i) {
-        sp<APacketSource> source = mHandler->getPacketSource(i);
-
-        bool newEOS;
-        int64_t queuedDurationUs = source->getQueueDurationUs(&newEOS);
-
-        if (!newEOS) {
-            *eos = false;
-        }
-
-        if (i == 0 || queuedDurationUs < minQueuedDurationUs) {
-            minQueuedDurationUs = queuedDurationUs;
-        }
-    }
-
-    return minQueuedDurationUs;
-}
-
-}  // namespace android
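
With ARTSPController.cpp deleted, the blocking connect()/disconnect()/seek() interface is gone; clients are instead expected to react to the notification messages MyHandler now posts (see the kWhat* enum added to MyHandler.h below). A hedged sketch of how such a client might dispatch those notifications; the handler function, its includes and the queueing comments are illustrative, not the actual in-tree consumer:

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AMessage.h>

    #include "MyHandler.h"

    using namespace android;

    // Sketch only: dispatch on the "what" field of MyHandler's notify message.
    static void onRTSPNotify(const sp<AMessage> &msg) {
        int32_t what;
        CHECK(msg->findInt32("what", &what));

        switch (what) {
            case MyHandler::kWhatConnected:
                // Track formats can now be queried via getTrackFormat().
                break;

            case MyHandler::kWhatAccessUnit:
            {
                size_t trackIndex;
                sp<RefBase> obj;
                CHECK(msg->findSize("trackIndex", &trackIndex));
                CHECK(msg->findObject("accessUnit", &obj));

                sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
                // Queue the unit on the matching track's source.
                break;
            }

            case MyHandler::kWhatEOS:
            case MyHandler::kWhatSeekDone:
            case MyHandler::kWhatSeekDiscontinuity:
            case MyHandler::kWhatNormalPlayTimeMapping:
            case MyHandler::kWhatDisconnected:
            default:
                break;
        }
    }
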
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 8530ff3..8230347 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -15,7 +15,6 @@
         ARTPSource.cpp              \
         ARTPWriter.cpp              \
         ARTSPConnection.cpp         \
-        ARTSPController.cpp         \
         ASessionDescription.cpp     \
 
 LOCAL_C_INCLUDES:= \
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 8128813..794c60b 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -44,12 +44,14 @@
 
 // If no access units are received within 5 secs, assume that the rtp
 // stream has ended and signal end of stream.
-static int64_t kAccessUnitTimeoutUs = 5000000ll;
+static int64_t kAccessUnitTimeoutUs = 10000000ll;
 
 // If no access units arrive for the first 10 secs after starting the
 // stream, assume none ever will and signal EOS or switch transports.
 static int64_t kStartupTimeoutUs = 10000000ll;
 
+static int64_t kDefaultKeepAliveTimeoutUs = 60000000ll;
+
 namespace android {
 
 static void MakeUserAgentString(AString *s) {
@@ -94,12 +96,24 @@
 }
 
 struct MyHandler : public AHandler {
+    enum {
+        kWhatConnected                  = 'conn',
+        kWhatDisconnected               = 'disc',
+        kWhatSeekDone                   = 'sdon',
+
+        kWhatAccessUnit                 = 'accU',
+        kWhatEOS                        = 'eos!',
+        kWhatSeekDiscontinuity          = 'seeD',
+        kWhatNormalPlayTimeMapping      = 'nptM',
+    };
+
     MyHandler(
-            const char *url, const sp<ALooper> &looper,
+            const char *url,
+            const sp<AMessage> &notify,
             bool uidValid = false, uid_t uid = 0)
-        : mUIDValid(uidValid),
+        : mNotify(notify),
+          mUIDValid(uidValid),
           mUID(uid),
-          mLooper(looper),
           mNetLooper(new ALooper),
           mConn(new ARTSPConnection(mUIDValid, mUID)),
           mRTPConn(new ARTPConnection),
@@ -118,7 +132,9 @@
           mTryFakeRTCP(false),
           mReceivedFirstRTCPPacket(false),
           mReceivedFirstRTPPacket(false),
-          mSeekable(false) {
+          mSeekable(false),
+          mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs),
+          mKeepAliveGeneration(0) {
         mNetLooper->setName("rtsp net");
         mNetLooper->start(false /* runOnCallingThread */,
                           false /* canCallJava */,
@@ -145,12 +161,9 @@
         mSessionHost = host;
     }
 
-    void connect(const sp<AMessage> &doneMsg) {
-        mDoneMsg = doneMsg;
-
-        mLooper->registerHandler(this);
-        mLooper->registerHandler(mConn);
-        (1 ? mNetLooper : mLooper)->registerHandler(mRTPConn);
+    void connect() {
+        looper()->registerHandler(mConn);
+        (1 ? mNetLooper : looper())->registerHandler(mRTPConn);
 
         sp<AMessage> notify = new AMessage('biny', id());
         mConn->observeBinaryData(notify);
@@ -159,33 +172,16 @@
         mConn->connect(mOriginalSessionURL.c_str(), reply);
     }
 
-    void disconnect(const sp<AMessage> &doneMsg) {
-        mDoneMsg = doneMsg;
-
+    void disconnect() {
         (new AMessage('abor', id()))->post();
     }
 
-    void seek(int64_t timeUs, const sp<AMessage> &doneMsg) {
+    void seek(int64_t timeUs) {
         sp<AMessage> msg = new AMessage('seek', id());
         msg->setInt64("time", timeUs);
-        msg->setMessage("doneMsg", doneMsg);
         msg->post();
     }
 
-    int64_t getNormalPlayTimeUs() {
-        int64_t maxTimeUs = 0;
-        for (size_t i = 0; i < mTracks.size(); ++i) {
-            int64_t timeUs = mTracks.editItemAt(i).mPacketSource
-                ->getNormalPlayTimeUs();
-
-            if (i == 0 || timeUs > maxTimeUs) {
-                maxTimeUs = timeUs;
-            }
-        }
-
-        return maxTimeUs;
-    }
-
     static void addRR(const sp<ABuffer> &buf) {
         uint8_t *ptr = buf->data() + buf->size();
         ptr[0] = 0x80 | 0;
@@ -379,6 +375,8 @@
 
             case 'disc':
             {
+                ++mKeepAliveGeneration;
+
                 int32_t reconnect;
                 if (msg->findInt32("reconnect", &reconnect) && reconnect) {
                     sp<AMessage> reply = new AMessage('conn', id());
@@ -465,8 +463,17 @@
                                 mBaseURL = tmp;
                             }
 
-                            CHECK_GT(mSessionDesc->countTracks(), 1u);
-                            setupTrack(1);
+                            if (mSessionDesc->countTracks() < 2) {
+                                // There are no actual tracks in this session.
+                                // The first "track" is merely session meta
+                                // data.
+
+                                LOGW("Session doesn't contain any playable "
+                                     "tracks. Aborting.");
+                                result = ERROR_UNSUPPORTED;
+                            } else {
+                                setupTrack(1);
+                            }
                         }
                     }
                 }
@@ -510,6 +517,34 @@
                         CHECK_GE(i, 0);
 
                         mSessionID = response->mHeaders.valueAt(i);
+
+                        mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+                        AString timeoutStr;
+                        if (GetAttribute(
+                                    mSessionID.c_str(), "timeout", &timeoutStr)) {
+                            char *end;
+                            unsigned long timeoutSecs =
+                                strtoul(timeoutStr.c_str(), &end, 10);
+
+                            if (end == timeoutStr.c_str() || *end != '\0') {
+                                LOGW("server specified malformed timeout '%s'",
+                                     timeoutStr.c_str());
+
+                                mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+                            } else if (timeoutSecs < 15) {
+                                LOGW("server specified too short a timeout "
+                                     "(%lu secs), using default.",
+                                     timeoutSecs);
+
+                                mKeepAliveTimeoutUs = kDefaultKeepAliveTimeoutUs;
+                            } else {
+                                mKeepAliveTimeoutUs = timeoutSecs * 1000000ll;
+
+                                LOGI("server specified timeout of %lu secs.",
+                                     timeoutSecs);
+                            }
+                        }
+
                         i = mSessionID.find(";");
                         if (i >= 0) {
                             // Remove options, i.e. ";timeout=90"
@@ -563,6 +598,9 @@
                 if (index < mSessionDesc->countTracks()) {
                     setupTrack(index);
                 } else if (mSetupTracksSuccessful) {
+                    ++mKeepAliveGeneration;
+                    postKeepAlive();
+
                     AString request = "PLAY ";
                     request.append(mSessionURL);
                     request.append(" RTSP/1.0\r\n");
@@ -614,12 +652,59 @@
                 break;
             }
 
+            case 'aliv':
+            {
+                int32_t generation;
+                CHECK(msg->findInt32("generation", &generation));
+
+                if (generation != mKeepAliveGeneration) {
+                    // obsolete event.
+                    break;
+                }
+
+                AString request;
+                request.append("OPTIONS ");
+                request.append(mSessionURL);
+                request.append(" RTSP/1.0\r\n");
+                request.append("Session: ");
+                request.append(mSessionID);
+                request.append("\r\n");
+                request.append("\r\n");
+
+                sp<AMessage> reply = new AMessage('opts', id());
+                reply->setInt32("generation", mKeepAliveGeneration);
+                mConn->sendRequest(request.c_str(), reply);
+                break;
+            }
+
+            case 'opts':
+            {
+                int32_t result;
+                CHECK(msg->findInt32("result", &result));
+
+                LOGI("OPTIONS completed with result %d (%s)",
+                     result, strerror(-result));
+
+                int32_t generation;
+                CHECK(msg->findInt32("generation", &generation));
+
+                if (generation != mKeepAliveGeneration) {
+                    // obsolete event.
+                    break;
+                }
+
+                postKeepAlive();
+                break;
+            }
+
             case 'abor':
             {
                 for (size_t i = 0; i < mTracks.size(); ++i) {
                     TrackInfo *info = &mTracks.editItemAt(i);
 
-                    info->mPacketSource->signalEOS(ERROR_END_OF_STREAM);
+                    if (!mFirstAccessUnit) {
+                        postQueueEOS(i, ERROR_END_OF_STREAM);
+                    }
 
                     if (!info->mUsingInterleavedTCP) {
                         mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
@@ -690,11 +775,10 @@
 
             case 'quit':
             {
-                if (mDoneMsg != NULL) {
-                    mDoneMsg->setInt32("result", UNKNOWN_ERROR);
-                    mDoneMsg->post();
-                    mDoneMsg = NULL;
-                }
+                sp<AMessage> msg = mNotify->dup();
+                msg->setInt32("what", kWhatDisconnected);
+                msg->setInt32("result", UNKNOWN_ERROR);
+                msg->post();
                 break;
             }
 
@@ -708,9 +792,13 @@
                 }
 
                 if (mNumAccessUnitsReceived == 0) {
+#if 1
                     LOGI("stream ended? aborting.");
                     (new AMessage('abor', id()))->post();
                     break;
+#else
+                    LOGI("haven't seen an AU in a looong time.");
+#endif
                 }
 
                 mNumAccessUnitsReceived = 0;
@@ -795,17 +883,12 @@
 
             case 'seek':
             {
-                sp<AMessage> doneMsg;
-                CHECK(msg->findMessage("doneMsg", &doneMsg));
-
-                if (mSeekPending) {
-                    doneMsg->post();
-                    break;
-                }
-
                 if (!mSeekable) {
                     LOGW("This is a live stream, ignoring seek request.");
-                    doneMsg->post();
+
+                    sp<AMessage> msg = mNotify->dup();
+                    msg->setInt32("what", kWhatSeekDone);
+                    msg->post();
                     break;
                 }
 
@@ -831,7 +914,6 @@
 
                 sp<AMessage> reply = new AMessage('see1', id());
                 reply->setInt64("time", timeUs);
-                reply->setMessage("doneMsg", doneMsg);
                 mConn->sendRequest(request.c_str(), reply);
                 break;
             }
@@ -842,7 +924,8 @@
                 for (size_t i = 0; i < mTracks.size(); ++i) {
                     TrackInfo *info = &mTracks.editItemAt(i);
 
-                    info->mPacketSource->flushQueue();
+                    postQueueSeekDiscontinuity(i);
+
                     info->mRTPAnchor = 0;
                     info->mNTPAnchorUs = -1;
                 }
@@ -866,11 +949,7 @@
 
                 request.append("\r\n");
 
-                sp<AMessage> doneMsg;
-                CHECK(msg->findMessage("doneMsg", &doneMsg));
-
                 sp<AMessage> reply = new AMessage('see2', id());
-                reply->setMessage("doneMsg", doneMsg);
                 mConn->sendRequest(request.c_str(), reply);
                 break;
             }
@@ -915,10 +994,9 @@
 
                 mSeekPending = false;
 
-                sp<AMessage> doneMsg;
-                CHECK(msg->findMessage("doneMsg", &doneMsg));
-
-                doneMsg->post();
+                sp<AMessage> msg = mNotify->dup();
+                msg->setInt32("what", kWhatSeekDone);
+                msg->post();
                 break;
             }
 
@@ -969,6 +1047,12 @@
         }
     }
 
+    void postKeepAlive() {
+        sp<AMessage> msg = new AMessage('aliv', id());
+        msg->setInt32("generation", mKeepAliveGeneration);
+        msg->post((mKeepAliveTimeoutUs * 9) / 10);
+    }
+
     void postAccessUnitTimeoutCheck() {
         if (mCheckPending) {
             return;
@@ -1056,8 +1140,14 @@
 
             LOGV("track #%d: rtpTime=%u <=> npt=%.2f", n, rtpTime, npt1);
 
-            info->mPacketSource->setNormalPlayTimeMapping(
-                    rtpTime, (int64_t)(npt1 * 1E6));
+            info->mNormalPlayTimeRTP = rtpTime;
+            info->mNormalPlayTimeUs = (int64_t)(npt1 * 1E6);
+
+            if (!mFirstAccessUnit) {
+                postNormalPlayTimeMapping(
+                        trackIndex,
+                        info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+            }
 
             ++n;
         }
@@ -1065,11 +1155,15 @@
         mSeekable = true;
     }
 
-    sp<APacketSource> getPacketSource(size_t index) {
+    sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale) {
         CHECK_GE(index, 0u);
         CHECK_LT(index, mTracks.size());
 
-        return mTracks.editItemAt(index).mPacketSource;
+        const TrackInfo &info = mTracks.itemAt(index);
+
+        *timeScale = info.mTimeScale;
+
+        return info.mPacketSource->getFormat();
     }
 
     size_t countTracks() const {
@@ -1089,6 +1183,9 @@
         int64_t mNTPAnchorUs;
         int32_t mTimeScale;
 
+        uint32_t mNormalPlayTimeRTP;
+        int64_t mNormalPlayTimeUs;
+
         sp<APacketSource> mPacketSource;
 
         // Stores packets temporarily while no notion of time
@@ -1096,9 +1193,9 @@
         List<sp<ABuffer> > mPackets;
     };
 
+    sp<AMessage> mNotify;
     bool mUIDValid;
     uid_t mUID;
-    sp<ALooper> mLooper;
     sp<ALooper> mNetLooper;
     sp<ARTSPConnection> mConn;
     sp<ARTPConnection> mRTPConn;
@@ -1124,11 +1221,11 @@
     bool mReceivedFirstRTCPPacket;
     bool mReceivedFirstRTPPacket;
     bool mSeekable;
+    int64_t mKeepAliveTimeoutUs;
+    int32_t mKeepAliveGeneration;
 
     Vector<TrackInfo> mTracks;
 
-    sp<AMessage> mDoneMsg;
-
     void setupTrack(size_t index) {
         sp<APacketSource> source =
             new APacketSource(mSessionDesc, index);
@@ -1158,6 +1255,8 @@
         info->mNewSegment = true;
         info->mRTPAnchor = 0;
         info->mNTPAnchorUs = -1;
+        info->mNormalPlayTimeRTP = 0;
+        info->mNormalPlayTimeUs = 0ll;
 
         unsigned long PT;
         AString formatDesc;
@@ -1283,9 +1382,17 @@
         LOGV("onAccessUnitComplete track %d", trackIndex);
 
         if (mFirstAccessUnit) {
-            mDoneMsg->setInt32("result", OK);
-            mDoneMsg->post();
-            mDoneMsg = NULL;
+            sp<AMessage> msg = mNotify->dup();
+            msg->setInt32("what", kWhatConnected);
+            msg->post();
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                TrackInfo *info = &mTracks.editItemAt(i);
+
+                postNormalPlayTimeMapping(
+                        i,
+                        info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
+            }
 
             mFirstAccessUnit = false;
         }
@@ -1303,12 +1410,12 @@
             track->mPackets.erase(track->mPackets.begin());
 
             if (addMediaTimestamp(trackIndex, track, accessUnit)) {
-                track->mPacketSource->queueAccessUnit(accessUnit);
+                postQueueAccessUnit(trackIndex, accessUnit);
             }
         }
 
         if (addMediaTimestamp(trackIndex, track, accessUnit)) {
-            track->mPacketSource->queueAccessUnit(accessUnit);
+            postQueueAccessUnit(trackIndex, accessUnit);
         }
     }
 
@@ -1344,6 +1451,39 @@
         return true;
     }
 
+    void postQueueAccessUnit(
+            size_t trackIndex, const sp<ABuffer> &accessUnit) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatAccessUnit);
+        msg->setSize("trackIndex", trackIndex);
+        msg->setObject("accessUnit", accessUnit);
+        msg->post();
+    }
+
+    void postQueueEOS(size_t trackIndex, status_t finalResult) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatEOS);
+        msg->setSize("trackIndex", trackIndex);
+        msg->setInt32("finalResult", finalResult);
+        msg->post();
+    }
+
+    void postQueueSeekDiscontinuity(size_t trackIndex) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatSeekDiscontinuity);
+        msg->setSize("trackIndex", trackIndex);
+        msg->post();
+    }
+
+    void postNormalPlayTimeMapping(
+            size_t trackIndex, uint32_t rtpTime, int64_t nptUs) {
+        sp<AMessage> msg = mNotify->dup();
+        msg->setInt32("what", kWhatNormalPlayTimeMapping);
+        msg->setSize("trackIndex", trackIndex);
+        msg->setInt32("rtpTime", rtpTime);
+        msg->setInt64("nptUs", nptUs);
+        msg->post();
+    }
 
     DISALLOW_EVIL_CONSTRUCTORS(MyHandler);
 };
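
The keep-alive additions reduce to simple arithmetic: take the timeout advertised in the Session header (e.g. "Session: 1234abcd;timeout=30"), fall back to the 60 s default if the value is missing, malformed, or under 15 s, and post the OPTIONS request at 90% of the chosen interval. A sketch of just the timeout selection, using a hypothetical helper that mirrors the logic added above:

    #include <stdint.h>
    #include <stdlib.h>

    static const int64_t kDefaultKeepAliveTimeoutUs = 60000000ll;

    // Sketch only: map the Session header's "timeout" attribute (in seconds)
    // to the keep-alive interval in microseconds.
    static int64_t pickKeepAliveTimeoutUs(const char *timeoutStr) {
        if (timeoutStr == NULL) {
            return kDefaultKeepAliveTimeoutUs;
        }

        char *end;
        unsigned long secs = strtoul(timeoutStr, &end, 10);

        if (end == timeoutStr || *end != '\0' || secs < 15) {
            return kDefaultKeepAliveTimeoutUs;  // malformed or too short
        }
        return (int64_t)secs * 1000000ll;
    }

    // postKeepAlive() then fires at 9/10 of the interval, so timeout=30
    // results in an OPTIONS request roughly every 27 seconds.
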
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 69560e5..780c0d2 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2066,7 +2066,21 @@
         // The first time a track is added we wait
         // for all its buffers to be filled before processing it
         mAudioMixer->setActiveTrack(track->name());
-        if (cblk->framesReady() && track->isReady() &&
+        // make sure that we have enough frames to mix one full buffer.
+        // enforce this condition only once to enable draining the buffer in case the client
+        // app does not call stop() and relies on underrun to stop:
+        // hence the test on (track->mRetryCount >= kMaxTrackRetries) meaning the track was mixed
+        // during last round
+        uint32_t minFrames = 1;
+        if (!track->isStopped() && !track->isPausing() &&
+                (track->mRetryCount >= kMaxTrackRetries)) {
+            if (t->sampleRate() == (int)mSampleRate) {
+                minFrames = mFrameCount;
+            } else {
+                minFrames = (mFrameCount * t->sampleRate()) / mSampleRate + 1;
+            }
+        }
+        if ((cblk->framesReady() >= minFrames) && track->isReady() &&
                 !track->isPaused() && !track->isTerminated())
         {
             //LOGV("track %d u=%08x, s=%08x [OK] on thread %p", track->name(), cblk->user, cblk->server, this);
@@ -7014,11 +7028,17 @@
 
 AudioFlinger::EffectChain::EffectChain(const wp<ThreadBase>& wThread,
                                         int sessionId)
-    : mThread(wThread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0),
+    : mThread(wThread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
       mOwnInBuffer(false), mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
       mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX)
 {
     mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
+    sp<ThreadBase> thread = mThread.promote();
+    if (thread == 0) {
+        return;
+    }
+    mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+                                    thread->frameCount();
 }
 
 AudioFlinger::EffectChain::~EffectChain()
@@ -7086,22 +7106,31 @@
     }
     bool isGlobalSession = (mSessionId == AUDIO_SESSION_OUTPUT_MIX) ||
             (mSessionId == AUDIO_SESSION_OUTPUT_STAGE);
-    bool tracksOnSession = false;
+    // always process effects unless no more tracks are on the session and the effect tail
+    // has been rendered
+    bool doProcess = true;
     if (!isGlobalSession) {
-        tracksOnSession = (trackCnt() != 0);
-    }
+        bool tracksOnSession = (trackCnt() != 0);
 
-    // if no track is active, input buffer must be cleared here as the mixer process
-    // will not do it
-    if (tracksOnSession &&
-            activeTrackCnt() == 0) {
-        size_t numSamples = thread->frameCount() * thread->channelCount();
-        memset(mInBuffer, 0, numSamples * sizeof(int16_t));
+        if (!tracksOnSession && mTailBufferCount == 0) {
+            doProcess = false;
+        }
+
+        if (activeTrackCnt() == 0) {
+            // if no track is active and the effect tail has not been rendered,
+            // the input buffer must be cleared here as the mixer process will not do it
+            if (tracksOnSession || mTailBufferCount > 0) {
+                size_t numSamples = thread->frameCount() * thread->channelCount();
+                memset(mInBuffer, 0, numSamples * sizeof(int16_t));
+                if (mTailBufferCount > 0) {
+                    mTailBufferCount--;
+                }
+            }
+        }
     }
 
     size_t size = mEffects.size();
-    // do not process effect if no track is present in same audio session
-    if (isGlobalSession || tracksOnSession) {
+    if (doProcess) {
         for (size_t i = 0; i < size; i++) {
             mEffects[i]->process();
         }
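
The new minFrames test above scales one mixer buffer by the ratio of track to mixer sample rate, plus one frame to absorb the rounding loss of the integer division; when the stricter check does not apply, minFrames stays at 1. A sketch with assumed values (mFrameCount = 1024, mSampleRate = 44100):

    #include <stdint.h>

    // Sketch only: the stricter frame threshold minFrames takes above,
    // i.e. how many frames a track must have ready to fill one full
    // mixer buffer at the track's own sample rate.
    static uint32_t minFramesToMix(
            uint32_t mixerFrameCount,     // mFrameCount
            uint32_t mixerSampleRate,     // mSampleRate
            uint32_t trackSampleRate) {   // t->sampleRate()
        if (trackSampleRate == mixerSampleRate) {
            return mixerFrameCount;
        }
        return (mixerFrameCount * trackSampleRate) / mixerSampleRate + 1;
    }

    // minFramesToMix(1024, 44100, 48000) == 1115
    // minFramesToMix(1024, 44100, 44100) == 1024
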
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 4b794ef..897bc78 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -1247,6 +1247,10 @@
         // corresponding to a suspend all request.
         static const int        kKeyForSuspendAll = 0;
 
+        // minimum duration during which we force calling effect process when last track on
+        // a session is stopped or removed to allow effect tail to be rendered
+        static const int        kProcessTailDurationMs = 1000;
+
         void process_l();
 
         void lock() {
@@ -1287,7 +1291,8 @@
         void decTrackCnt() { android_atomic_dec(&mTrackCnt); }
         int32_t trackCnt() { return mTrackCnt;}
 
-        void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt); }
+        void incActiveTrackCnt() { android_atomic_inc(&mActiveTrackCnt);
+                                   mTailBufferCount = mMaxTailBuffers; }
         void decActiveTrackCnt() { android_atomic_dec(&mActiveTrackCnt); }
         int32_t activeTrackCnt() { return mActiveTrackCnt;}
 
@@ -1338,6 +1343,8 @@
         int16_t *mOutBuffer;        // chain output buffer
         volatile int32_t mActiveTrackCnt;  // number of active tracks connected
         volatile int32_t mTrackCnt;        // number of tracks connected
+        int32_t mTailBufferCount;   // current effect tail buffer count
+        int32_t mMaxTailBuffers;    // maximum effect tail buffers
         bool mOwnInBuffer;          // true if the chain owns its input buffer
         int mVolumeCtrlIdx;         // index of insert effect having control over volume
         uint32_t mLeftVolume;       // previous volume on left channel
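
The tail counters added to EffectChain amount to roughly kProcessTailDurationMs of extra effect processing after the last track on a session stops. With assumed thread parameters (sampleRate = 44100, frameCount = 1024):

    #include <stddef.h>
    #include <stdint.h>

    static const int kProcessTailDurationMs = 1000;

    // Sketch only: number of output buffers the effect tail spans,
    // matching the mMaxTailBuffers computation in the EffectChain
    // constructor above.
    static int32_t maxTailBuffers(uint32_t sampleRate, size_t frameCount) {
        return ((kProcessTailDurationMs * sampleRate) / 1000) / frameCount;
    }

    // maxTailBuffers(44100, 1024) == 43: process() keeps running on
    // cleared input for 43 buffers (about one second) after the last
    // track goes inactive, so e.g. a reverb tail can decay instead of
    // being cut off.
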