Merge changes from topic "ViLTE-IMP"

* changes:
  VT: AHEVCAssembler: Supports H265 (HEVC) for Rx.
  VT: ARTPWriter: Supports H265 (HEVC) for Tx.
  VT: ARTPWriter: Supports IPv6.
  VT: ARTPSource: Jitter buffer implementation.
  VT: ARTPConnection: Supports IPv6.
  VT: SFP: Implements interface for RTP parameters.
  VT: RTPSource: Added RTPSource as one of the NuPlayer::Source implementations.
  VT: ARTPConnection: Bind RTP/RTCP sockets to a specific IP.
  VT: ASessionDescription: Added SDPStringFactory.
  VT: SFR: Added parameters to handle RTP IP addresses through setParameters().
  VT: ARTPWriter: Enhanced ARTPWriter as an RTP output.
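
Taken together, these changes expose RTP both as a playback source (MediaPlayer -> NuPlayer::RTPSource) and as a recording sink (StagefrightRecorder -> ARTPWriter). The sketch below is illustrative only: the rtp-param-*/video-param-* keys are the ones parsed by RTPSource::setParameter() and StagefrightRecorder::setParameter() in this patch, while the helper name, addresses, ports, payload type and mime value are hypothetical placeholders.

    // Illustrative sketch, not part of this change: drives the new RTP path
    // from native code. All concrete values below are made-up examples.
    #include <media/mediaplayer.h>
    #include <media/mediarecorder.h>
    #include <utils/String8.h>

    using namespace android;

    void startRtpSessionSketch(const sp<MediaRecorder> &recorder) {
        // Receive path: the new setDataSource(String8) overload is routed via
        // MediaPlayerService and NuPlayerDriver to NuPlayer::RTPSource.
        sp<MediaPlayer> player = new MediaPlayer();
        String8 rtpParams(
                "rtp-param-mime-type=video/avc;"
                "video-param-width=640;video-param-height=480;"
                "rtp-param-local-ip=192.0.2.1;rtp-param-local-port=50000;"
                "rtp-param-remote-ip=192.0.2.2;rtp-param-remote-port=50002;"
                "rtp-param-payload-type=96;rtp-param-as=512");
        player->setDataSource(rtpParams);
        player->prepareAsync();

        // Send path: StagefrightRecorder stores these endpoints and hands them
        // to ARTPWriter when recording starts (the recorder is assumed to be
        // configured for an RTP-capable output elsewhere).
        recorder->setParameters(String8(
                "rtp-param-local-ip=192.0.2.1;rtp-param-local-port=50000;"
                "rtp-param-remote-ip=192.0.2.2;rtp-param-remote-port=50002"));
    }
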
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 4c76fd2..134e6fe 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -40,6 +40,7 @@
     SET_DATA_SOURCE_FD,
     SET_DATA_SOURCE_STREAM,
     SET_DATA_SOURCE_CALLBACK,
+    SET_DATA_SOURCE_RTP,
     SET_BUFFERING_SETTINGS,
     GET_BUFFERING_SETTINGS,
     PREPARE_ASYNC,
@@ -161,6 +162,15 @@
         return reply.readInt32();
     }
 
+    status_t setDataSource(const String8& rtpParams) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+        data.writeString8(rtpParams);
+        remote()->transact(SET_DATA_SOURCE_RTP, data, &reply);
+
+        return reply.readInt32();
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)
     {
@@ -685,6 +695,12 @@
             }
             return NO_ERROR;
         }
+        case SET_DATA_SOURCE_RTP: {
+            CHECK_INTERFACE(IMediaPlayer, data, reply);
+            const String8& rtpParams = data.readString8();
+            reply->writeInt32(setDataSource(rtpParams));
+            return NO_ERROR;
+        }
         case SET_VIDEO_SURFACETEXTURE: {
             CHECK_INTERFACE(IMediaPlayer, data, reply);
             sp<IGraphicBufferProducer> bufferProducer =
diff --git a/media/libmedia/include/media/IMediaPlayer.h b/media/libmedia/include/media/IMediaPlayer.h
index a4c0ec6..3548a1e 100644
--- a/media/libmedia/include/media/IMediaPlayer.h
+++ b/media/libmedia/include/media/IMediaPlayer.h
@@ -59,6 +59,7 @@
     virtual status_t        setDataSource(int fd, int64_t offset, int64_t length) = 0;
     virtual status_t        setDataSource(const sp<IStreamSource>& source) = 0;
     virtual status_t        setDataSource(const sp<IDataSource>& source) = 0;
+    virtual status_t        setDataSource(const String8& rtpParams) = 0;
     virtual status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer) = 0;
     virtual status_t        getBufferingSettings(
diff --git a/media/libmedia/include/media/mediaplayer.h b/media/libmedia/include/media/mediaplayer.h
index 7c29e50..9c5f61e 100644
--- a/media/libmedia/include/media/mediaplayer.h
+++ b/media/libmedia/include/media/mediaplayer.h
@@ -219,6 +219,7 @@
 
             status_t        setDataSource(int fd, int64_t offset, int64_t length);
             status_t        setDataSource(const sp<IDataSource> &source);
+            status_t        setDataSource(const String8& rtpParams);
             status_t        setVideoSurfaceTexture(
                                     const sp<IGraphicBufferProducer>& bufferProducer);
             status_t        setListener(const sp<MediaPlayerListener>& listener);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 6079a2d..b0db9d5 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -195,6 +195,22 @@
     return err;
 }
 
+status_t MediaPlayer::setDataSource(const String8& rtpParams)
+{
+    ALOGV("setDataSource(rtpParams)");
+    status_t err = UNKNOWN_ERROR;
+    const sp<IMediaPlayerService> service(getMediaPlayerService());
+    if (service != 0) {
+        sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
+        if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
+            (NO_ERROR != player->setDataSource(rtpParams))) {
+            player.clear();
+        }
+        err = attachNewPlayer(player);
+    }
+    return err;
+}
+
 status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
 {
     Mutex::Autolock _l(mLock);
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 016f622..4d90d98 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1063,6 +1063,17 @@
     return mStatus = setDataSource_post(p, p->setDataSource(dataSource));
 }
 
+status_t MediaPlayerService::Client::setDataSource(
+        const String8& rtpParams) {
+    player_type playerType = NU_PLAYER;
+    sp<MediaPlayerBase> p = setDataSource_pre(playerType);
+    if (p == NULL) {
+        return NO_INIT;
+    }
+    // now set data source
+    return mStatus = setDataSource_post(p, p->setDataSource(rtpParams));
+}
+
 void MediaPlayerService::Client::disconnectNativeWindow_l() {
     if (mConnectedWindow != NULL) {
         status_t err = nativeWindowDisconnect(
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index a7de3f3..b2f1b9b 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -372,6 +372,7 @@
 
         virtual status_t        setDataSource(const sp<IStreamSource> &source);
         virtual status_t        setDataSource(const sp<IDataSource> &source);
+        virtual status_t        setDataSource(const String8& rtpParams);
 
 
         sp<MediaPlayerBase>     setDataSource_pre(player_type playerType);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 71beceb..93cf5bd 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -779,6 +779,34 @@
     return OK;
 }
 
+status_t StagefrightRecorder::setParamRtpLocalIp(const String8 &localIp) {
+    ALOGV("setParamRtpLocalIp: %s", localIp.string());
+
+    mLocalIp.setTo(localIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpLocalPort(int32_t localPort) {
+    ALOGV("setParamRtpLocalPort: %d", localPort);
+
+    mLocalPort = localPort;
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemoteIp(const String8 &remoteIp) {
+    ALOGV("setParamRtpRemoteIp: %s", remoteIp.string());
+
+    mRemoteIp.setTo(remoteIp.string());
+    return OK;
+}
+
+status_t StagefrightRecorder::setParamRtpRemotePort(int32_t remotePort) {
+    ALOGV("setParamRtpRemotePort: %d", remotePort);
+
+    mRemotePort = remotePort;
+    return OK;
+}
+
 status_t StagefrightRecorder::setParameter(
         const String8 &key, const String8 &value) {
     ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -887,6 +915,20 @@
         if (safe_strtod(value.string(), &fps)) {
             return setParamCaptureFps(fps);
         }
+    } else if (key == "rtp-param-local-ip") {
+        return setParamRtpLocalIp(value);
+    } else if (key == "rtp-param-local-port") {
+        int32_t localPort;
+        if (safe_strtoi32(value.string(), &localPort)) {
+            return setParamRtpLocalPort(localPort);
+        }
+    } else if (key == "rtp-param-remote-ip") {
+        return setParamRtpRemoteIp(value);
+    } else if (key == "rtp-param-remote-port") {
+        int32_t remotePort;
+        if (safe_strtoi32(value.string(), &remotePort)) {
+            return setParamRtpRemotePort(remotePort);
+        }
     } else {
         ALOGE("setParameter: failed to find key %s", key.string());
     }
@@ -1333,7 +1375,7 @@
         mVideoEncoderSource = source;
     }
 
-    mWriter = new ARTPWriter(mOutputFd);
+    mWriter = new ARTPWriter(mOutputFd, mLocalIp, mLocalPort, mRemoteIp, mRemotePort);
     mWriter->addSource(source);
     mWriter->setListener(mListener);
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index a725bee..5f02e00 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -138,6 +138,10 @@
     int32_t mLongitudex10000;
     int32_t mStartTimeOffsetMs;
     int32_t mTotalBitRate;
+    String8 mLocalIp;
+    String8 mRemoteIp;
+    int32_t mLocalPort;
+    int32_t mRemotePort;
 
     int64_t mDurationRecordedUs;
     int64_t mStartedRecordingUs;
@@ -219,6 +223,10 @@
     status_t setParamMovieTimeScale(int32_t timeScale);
     status_t setParamGeoDataLongitude(int64_t longitudex10000);
     status_t setParamGeoDataLatitude(int64_t latitudex10000);
+    status_t setParamRtpLocalIp(const String8 &localIp);
+    status_t setParamRtpLocalPort(int32_t localPort);
+    status_t setParamRtpRemoteIp(const String8 &remoteIp);
+    status_t setParamRtpRemotePort(int32_t remotePort);
     void clipVideoBitRate();
     void clipVideoFrameRate();
     void clipVideoFrameWidth();
diff --git a/media/libmediaplayerservice/include/MediaPlayerInterface.h b/media/libmediaplayerservice/include/MediaPlayerInterface.h
index 81da5b9..8d94698 100644
--- a/media/libmediaplayerservice/include/MediaPlayerInterface.h
+++ b/media/libmediaplayerservice/include/MediaPlayerInterface.h
@@ -183,6 +183,10 @@
         return INVALID_OPERATION;
     }
 
+    virtual status_t    setDataSource(const String8& /* rtpParams */) {
+        return INVALID_OPERATION;
+    }
+
     // pass the buffered IGraphicBufferProducer to the media player service
     virtual status_t    setVideoSurfaceTexture(
                                 const sp<IGraphicBufferProducer>& bufferProducer) = 0;
diff --git a/media/libmediaplayerservice/nuplayer/Android.bp b/media/libmediaplayerservice/nuplayer/Android.bp
index 77475c1..5a1272a 100644
--- a/media/libmediaplayerservice/nuplayer/Android.bp
+++ b/media/libmediaplayerservice/nuplayer/Android.bp
@@ -33,6 +33,7 @@
         "NuPlayerRenderer.cpp",
         "NuPlayerStreamListener.cpp",
         "RTSPSource.cpp",
+        "RTPSource.cpp",
         "StreamingSource.cpp",
     ],
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c1c4b55..8b585b1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -31,6 +31,7 @@
 #include "NuPlayerDriver.h"
 #include "NuPlayerRenderer.h"
 #include "NuPlayerSource.h"
+#include "RTPSource.h"
 #include "RTSPSource.h"
 #include "StreamingSource.h"
 #include "GenericSource.h"
@@ -368,6 +369,17 @@
     return err;
 }
 
+void NuPlayer::setDataSourceAsync(const String8& rtpParams) {
+    ALOGD("setDataSourceAsync for RTP = %s", rtpParams.string());
+    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
+
+    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
+    sp<Source> source = new RTPSource(notify, rtpParams);
+
+    msg->setObject("source", source);
+    msg->post();
+}
+
 void NuPlayer::prepareAsync() {
     ALOGV("prepareAsync");
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index ef4354c..f316096 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -51,6 +51,8 @@
 
     void setDataSourceAsync(const sp<DataSource> &source);
 
+    void setDataSourceAsync(const String8& rtpParams);
+
     status_t getBufferingSettings(BufferingSettings* buffering /* nonnull */);
     status_t setBufferingSettings(const BufferingSettings& buffering);
 
@@ -117,6 +119,7 @@
     struct GenericSource;
     struct HTTPLiveSource;
     struct Renderer;
+    struct RTPSource;
     struct RTSPSource;
     struct StreamingSource;
     struct Action;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index dc144b2..2d82944 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -218,6 +218,26 @@
     return mAsyncResult;
 }
 
+status_t NuPlayerDriver::setDataSource(const String8& rtpParams) {
+    ALOGV("setDataSource(%p) rtp source", this);
+    Mutex::Autolock autoLock(mLock);
+
+    if (mState != STATE_IDLE) {
+        return INVALID_OPERATION;
+    }
+
+    mState = STATE_SET_DATASOURCE_PENDING;
+
+    mPlayer->setDataSourceAsync(rtpParams);
+
+    while (mState == STATE_SET_DATASOURCE_PENDING) {
+        mCondition.wait(mLock);
+    }
+
+    return mAsyncResult;
+}
+
+
 status_t NuPlayerDriver::setVideoSurfaceTexture(
         const sp<IGraphicBufferProducer> &bufferProducer) {
     ALOGV("setVideoSurfaceTexture(%p)", this);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index f4b1968..55a0fad 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -43,6 +43,8 @@
 
     virtual status_t setDataSource(const sp<DataSource>& dataSource);
 
+    virtual status_t setDataSource(const String8& rtpParams);
+
     virtual status_t setVideoSurfaceTexture(
             const sp<IGraphicBufferProducer> &bufferProducer);
 
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
new file mode 100644
index 0000000..de1f8a1
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -0,0 +1,708 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSource"
+#include <utils/Log.h>
+
+#include "RTPSource.h"
+
+
+
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <string.h>
+
+namespace android {
+
+const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
+static int32_t kMaxAllowedStaleAccessUnits = 20;
+
+NuPlayer::RTPSource::RTPSource(
+        const sp<AMessage> &notify,
+        const String8& rtpParams)
+    : Source(notify),
+      mRTPParams(rtpParams),
+      mFlags(0),
+      mState(DISCONNECTED),
+      mFinalResult(OK),
+      mBuffering(false),
+      mInPreparationPhase(true),
+      mRTPConn(new ARTPConnection),
+      mEOSTimeoutAudio(0),
+      mEOSTimeoutVideo(0) {
+    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
+}
+
+NuPlayer::RTPSource::~RTPSource() {
+    if (mLooper != NULL) {
+        mLooper->unregisterHandler(id());
+        mLooper->unregisterHandler(mRTPConn->id());
+        mLooper->stop();
+    }
+}
+
+status_t NuPlayer::RTPSource::getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    *buffering = mBufferingSettings;
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
+    Mutex::Autolock _l(mBufferingSettingsLock);
+    mBufferingSettings = buffering;
+    return OK;
+}
+
+void NuPlayer::RTPSource::prepareAsync() {
+    if (mLooper == NULL) {
+        mLooper = new ALooper;
+        mLooper->setName("rtp");
+        mLooper->start();
+
+        mLooper->registerHandler(this);
+        mLooper->registerHandler(mRTPConn);
+    }
+
+    setParameters(mRTPParams);
+
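+    // For each track described by the parameter string, synthesize a minimal
+    // SDP via SDPStringFactory, open an RTP/RTCP socket pair bound to the
+    // negotiated addresses and register the stream with ARTPConnection.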
+    TrackInfo *info = NULL;
+    unsigned i;
+    for (i = 0; i < mTracks.size(); i++) {
+        info = &mTracks.editItemAt(i);
+
+        if (info == NULL)
+            break;
+
+        AString sdp;
+        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
+                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
+                NULL, info->mWidth, info->mHeight);
+        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());
+
+        sp<ASessionDescription> desc = new ASessionDescription;
+        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
+        ALOGV("RTPSource isValidSdp => %d", isValidSdp);
+
+        int sockRtp, sockRtcp;
+        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
+                info->mLocalPort, info->mRemotePort);
+
+        sp<AMessage> notify = new AMessage('accu', this);
+
+        ALOGV("RTPSource addStream. track-index=%d", i);
+        notify->setSize("trackIndex", i);
+        // index (i) should start from 1; 0 is reserved for [root].
+        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
+
+        info->mRTPSocket = sockRtp;
+        info->mRTCPSocket = sockRtcp;
+        info->mFirstSeqNumInSegment = 0;
+        info->mNewSegment = true;
+        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+        info->mRTPAnchor = 0;
+        info->mNTPAnchorUs = -1;
+        info->mNormalPlayTimeRTP = 0;
+        info->mNormalPlayTimeUs = 0ll;
+
+        // index (i) should start from 1; 0 is reserved for [root].
+        info->mPacketSource = new APacketSource(desc, i + 1);
+
+        int32_t timeScale;
+        sp<MetaData> format = getTrackFormat(i, &timeScale);
+        sp<AnotherPacketSource> source = new AnotherPacketSource(format);
+
+        if (info->mIsAudio) {
+            mAudioTrack = source;
+        } else {
+            mVideoTrack = source;
+        }
+
+        info->mSource = source;
+    }
+
+    CHECK_EQ(mState, (int)DISCONNECTED);
+    mState = CONNECTING;
+
+    if (mInPreparationPhase) {
+        mInPreparationPhase = false;
+        notifyPrepared();
+    }
+}
+
+void NuPlayer::RTPSource::start() {
+}
+
+void NuPlayer::RTPSource::pause() {
+    mState = PAUSED;
+}
+
+void NuPlayer::RTPSource::resume() {
+    mState = CONNECTING;
+}
+
+void NuPlayer::RTPSource::stop() {
+    if (mLooper == NULL) {
+        return;
+    }
+    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
+
+    sp<AMessage> dummy;
+    msg->postAndAwaitResponse(&dummy);
+}
+
+status_t NuPlayer::RTPSource::feedMoreTSData() {
+    Mutex::Autolock _l(mBufferingLock);
+    return mFinalResult;
+}
+
+sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (source == NULL) {
+        return NULL;
+    }
+
+    return source->getFormat();
+}
+
+bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
+
+    static const int64_t kMinDurationUs = 2000000ll;
+
+    int64_t mediaDurationUs = 0;
+    getDuration(&mediaDurationUs);
+    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
+            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
+        return true;
+    }
+
+    status_t err;
+    int64_t durationUs;
+    if (mAudioTrack != NULL
+            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    if (mVideoTrack != NULL
+            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
+                    < kMinDurationUs
+            && err == OK) {
+        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+              durationUs / 1E6);
+        return false;
+    }
+
+    return true;
+}
+
+status_t NuPlayer::RTPSource::dequeueAccessUnit(
+        bool audio, sp<ABuffer> *accessUnit) {
+
+    sp<AnotherPacketSource> source = getSource(audio);
+
+    if (mState == PAUSED) {
+        ALOGV("-EWOULDBLOCK");
+        return -EWOULDBLOCK;
+    }
+
+    status_t finalResult;
+    if (!source->hasBufferAvailable(&finalResult)) {
+        if (finalResult == OK) {
+            int64_t mediaDurationUs = 0;
+            getDuration(&mediaDurationUs);
+            sp<AnotherPacketSource> otherSource = getSource(!audio);
+            status_t otherFinalResult;
+
+            // If other source already signaled EOS, this source should also signal EOS
+            if (otherSource != NULL &&
+                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
+                    otherFinalResult == ERROR_END_OF_STREAM) {
+                source->signalEOS(ERROR_END_OF_STREAM);
+                return ERROR_END_OF_STREAM;
+            }
+
+            // If this source has detected near end, give it some time to retrieve more
+            // data before signaling EOS
+            if (source->isFinished(mediaDurationUs)) {
+                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
+                if (eosTimeout == 0) {
+                    setEOSTimeout(audio, ALooper::GetNowUs());
+                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
+                    setEOSTimeout(audio, 0);
+                    source->signalEOS(ERROR_END_OF_STREAM);
+                    return ERROR_END_OF_STREAM;
+                }
+                return -EWOULDBLOCK;
+            }
+
+            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
+                // We should not enter buffering mode
+                // if any of the sources has already detected EOS.
+                // TODO: check whether the line below is needed.
+                // startBufferingIfNecessary();
+            }
+
+            return -EWOULDBLOCK;
+        }
+        return finalResult;
+    }
+
+    setEOSTimeout(audio, 0);
+
+    return source->dequeueAccessUnit(accessUnit);
+}
+
+sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
+    return audio ? mAudioTrack : mVideoTrack;
+}
+
+void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
+    if (audio) {
+        mEOSTimeoutAudio = timeout;
+    } else {
+        mEOSTimeoutVideo = timeout;
+    }
+}
+
+status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
+    *durationUs = 0ll;
+
+    int64_t audioDurationUs;
+    if (mAudioTrack != NULL
+            && mAudioTrack->getFormat()->findInt64(
+                kKeyDuration, &audioDurationUs)
+            && audioDurationUs > *durationUs) {
+        *durationUs = audioDurationUs;
+    }
+
+    int64_t videoDurationUs;
+    if (mVideoTrack != NULL
+            && mVideoTrack->getFormat()->findInt64(
+                kKeyDuration, &videoDurationUs)
+            && videoDurationUs > *durationUs) {
+        *durationUs = videoDurationUs;
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
+    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
+    return OK;
+}
+
+void NuPlayer::RTPSource::schedulePollBuffering() {
+    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
+    msg->post(1000000ll); // 1 second intervals
+}
+
+void NuPlayer::RTPSource::onPollBuffering() {
+    schedulePollBuffering();
+}
+
+void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
+    ALOGV("onMessageReceived =%d", msg->what());
+
+    switch (msg->what()) {
+        case kWhatAccessUnitComplete:
+        {
+            if (mState == CONNECTING) {
+                mState = CONNECTED;
+            }
+
+            int32_t timeUpdate;
+            //"time-update" raised from ARTPConnection::parseSR()
+            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
+                size_t trackIndex;
+                CHECK(msg->findSize("trackIndex", &trackIndex));
+
+                uint32_t rtpTime;
+                uint64_t ntpTime;
+                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
+                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
+
+                onTimeUpdate(trackIndex, rtpTime, ntpTime);
+                break;
+            }
+
+            int32_t firstRTCP;
+            if (msg->findInt32("first-rtcp", &firstRTCP)) {
+                // There won't be an access unit here, it's just a notification
+                // that the data communication worked since we got the first
+                // rtcp packet.
+                ALOGV("first-rtcp");
+                break;
+            }
+
+            size_t trackIndex;
+            CHECK(msg->findSize("trackIndex", &trackIndex));
+
+            sp<ABuffer> accessUnit;
+            if (msg->findBuffer("access-unit", &accessUnit) == false) {
+                break;
+            }
+
+            int32_t damaged;
+            if (accessUnit->meta()->findInt32("damaged", &damaged)
+                    && damaged) {
+                ALOGD("dropping damaged access unit.");
+                break;
+            }
+
+            TrackInfo *info = &mTracks.editItemAt(trackIndex);
+
+            sp<AnotherPacketSource> source = info->mSource;
+            if (source != NULL) {
+                uint32_t rtpTime;
+                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+                /* AnotherPacketSource asserts if no NTP time is provided;
+                   RTPSource should always provide ntpUs.
+                if (!info->mNPTMappingValid) {
+                    // This is a live stream, we didn't receive any normal
+                    // playtime mapping. We won't map to npt time.
+                    source->queueAccessUnit(accessUnit);
+                    break;
+                }
+                */
+
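+                // Convert the RTP timestamp into a presentation time: offset
+                // from the track's RTP anchor, scaled from the RTP clock rate
+                // to microseconds, plus the anchored normal play time.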
+                int64_t nptUs =
+                    ((double)rtpTime - (double)info->mRTPTime)
+                        / info->mTimeScale
+                        * 1000000ll
+                        + info->mNormalPlaytimeUs;
+
+                accessUnit->meta()->setInt64("timeUs", nptUs);
+
+                source->queueAccessUnit(accessUnit);
+            }
+
+            break;
+        }
+        case kWhatDisconnect:
+        {
+            sp<AReplyToken> replyID;
+            CHECK(msg->senderAwaitsResponse(&replyID));
+
+            for (size_t i = 0; i < mTracks.size(); ++i) {
+                TrackInfo *info = &mTracks.editItemAt(i);
+
+                if (info->mIsAudio) {
+                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mAudioTrack = NULL;
+                    ALOGV("mAudioTrack disconnected");
+                } else {
+                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
+                    mVideoTrack = NULL;
+                    ALOGV("mVideoTrack disconnected");
+                }
+
+                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
+                close(info->mRTPSocket);
+                close(info->mRTCPSocket);
+            }
+
+            mTracks.clear();
+            mFirstAccessUnit = true;
+            mAllTracksHaveTime = false;
+            mNTPAnchorUs = -1;
+            mMediaAnchorUs = -1;
+            mLastMediaTimeUs = -1;
+            mNumAccessUnitsReceived = 0;
+            mReceivedFirstRTCPPacket = false;
+            mReceivedFirstRTPPacket = false;
+            mPausing = false;
+            mPauseGeneration = 0;
+
+            (new AMessage)->postReply(replyID);
+
+            break;
+        }
+        case kWhatPollBuffering:
+            break;
+        default:
+            TRESPASS();
+    }
+}
+
+void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
+    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
+         trackIndex, rtpTime, (long long)ntpTime);
+
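+    // NTP timestamps are 32.32 fixed point (seconds.fraction); scale by
+    // 1e6 / 2^32 to obtain microseconds.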
+    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));
+
+    TrackInfo *track = &mTracks.editItemAt(trackIndex);
+
+    track->mRTPAnchor = rtpTime;
+    track->mNTPAnchorUs = ntpTimeUs;
+
+    if (mNTPAnchorUs < 0) {
+        mNTPAnchorUs = ntpTimeUs;
+        mMediaAnchorUs = mLastMediaTimeUs;
+    }
+
+    if (!mAllTracksHaveTime) {
+        bool allTracksHaveTime = (mTracks.size() > 0);
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *track = &mTracks.editItemAt(i);
+            if (track->mNTPAnchorUs < 0) {
+                allTracksHaveTime = false;
+                break;
+            }
+        }
+        if (allTracksHaveTime) {
+            mAllTracksHaveTime = true;
+            ALOGI("Time now established for all tracks.");
+        }
+    }
+    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+        // Time is now established, let's start timestamping immediately.
+        for (size_t i = 0; i < mTracks.size(); ++i) {
+            TrackInfo *trackInfo = &mTracks.editItemAt(i);
+            while (!trackInfo->mPackets.empty()) {
+                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
+                trackInfo->mPackets.erase(trackInfo->mPackets.begin());
+
+                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
+                    postQueueAccessUnit(i, accessUnit);
+                }
+            }
+        }
+    }
+}
+
+bool NuPlayer::RTPSource::addMediaTimestamp(
+        int32_t trackIndex, const TrackInfo *track,
+        const sp<ABuffer> &accessUnit) {
+
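+    // Anchor the unit's RTP time against the track's RTP/NTP anchor pair,
+    // then offset from the session media anchor to get a media timestamp.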
+    uint32_t rtpTime;
+    CHECK(accessUnit->meta()->findInt32(
+                "rtp-time", (int32_t *)&rtpTime));
+
+    int64_t relRtpTimeUs =
+        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
+        / track->mTimeScale;
+
+    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;
+
+    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;
+
+    if (mediaTimeUs > mLastMediaTimeUs) {
+        mLastMediaTimeUs = mediaTimeUs;
+    }
+
+    if (mediaTimeUs < 0) {
+        ALOGV("dropping early accessUnit.");
+        return false;
+    }
+
+    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
+            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);
+
+    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);
+
+    return true;
+}
+
+bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
+    TrackInfo *track;
+    for (size_t i = 0; i < mTracks.size(); ++i) {
+        track = &mTracks.editItemAt(i);
+        if (track->mPackets.empty()) {
+            return false;
+        }
+    }
+    return true;
+}
+
+void NuPlayer::RTPSource::postQueueAccessUnit(
+        size_t trackIndex, const sp<ABuffer> &accessUnit) {
+    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
+    msg->setInt32("what", kWhatAccessUnit);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setBuffer("accessUnit", accessUnit);
+    msg->post();
+}
+
+void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
+    sp<AMessage> msg = new AMessage(kWhatEOS, this);
+    msg->setInt32("what", kWhatEOS);
+    msg->setSize("trackIndex", trackIndex);
+    msg->setInt32("finalResult", finalResult);
+    msg->post();
+}
+
+sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
+    CHECK_GE(index, 0u);
+    CHECK_LT(index, mTracks.size());
+
+    const TrackInfo &info = mTracks.itemAt(index);
+
+    *timeScale = info.mTimeScale;
+
+    return info.mPacketSource->getFormat();
+}
+
+void NuPlayer::RTPSource::onConnected() {
+    ALOGV("onConnected");
+    mState = CONNECTED;
+}
+
+void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
+    if (mState == DISCONNECTED) {
+        return;
+    }
+
+    status_t err;
+    CHECK(msg->findInt32("result", &err));
+    CHECK_NE(err, (status_t)OK);
+
+//    mLooper->unregisterHandler(mHandler->id());
+//    mHandler.clear();
+
+    if (mState == CONNECTING) {
+        // We're still in the preparation phase, signal that it
+        // failed.
+        notifyPrepared(err);
+    }
+
+    mState = DISCONNECTED;
+//    setError(err);
+
+}
+
+status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
+    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
+
+    bool isAudioKey = key.contains("audio");
+    TrackInfo *info = NULL;
+    for (unsigned i = 0; i < mTracks.size(); ++i) {
+        info = &mTracks.editItemAt(i);
+        if (info != NULL && info->mIsAudio == isAudioKey) {
+            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
+            break;
+        }
+    }
+
+    if (info == NULL) {
+        TrackInfo newTrackInfo;
+        newTrackInfo.mIsAudio = isAudioKey;
+        mTracks.push(newTrackInfo);
+        info = &mTracks.editTop();
+    }
+
+    if (key == "rtp-param-mime-type") {
+        info->mMimeType = value;
+
+        const char *mime = value.string();
+        const char *delimiter = strchr(mime, '/');
+        info->mCodecName = (delimiter + 1);
+
+        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
+            info->mMimeType.string(), info->mCodecName.string());
+    } else if (key == "video-param-decoder-profile") {
+        info->mCodecProfile = atoi(value);
+    } else if (key == "video-param-decoder-level") {
+        info->mCodecLevel = atoi(value);
+    } else if (key == "video-param-width") {
+        info->mWidth = atoi(value);
+    } else if (key == "video-param-height") {
+        info->mHeight = atoi(value);
+    } else if (key == "rtp-param-local-ip") {
+        info->mLocalIp = value;
+    } else if (key == "rtp-param-local-port") {
+        info->mLocalPort = atoi(value);
+    } else if (key == "rtp-param-remote-ip") {
+        info->mRemoteIp = value;
+    } else if (key == "rtp-param-remote-port") {
+        info->mRemotePort = atoi(value);
+    } else if (key == "rtp-param-payload-type") {
+        info->mPayloadType = atoi(value);
+    } else if (key == "rtp-param-as") {
+        // AS means the guaranteed bitrate negotiated from the SDP.
+        info->mAS = atoi(value);
+    } else if (key == "rtp-param-rtp-timeout") {
+    } else if (key == "rtp-param-rtcp-timeout") {
+    } else if (key == "rtp-param-time-scale") {
+    }
+
+    return OK;
+}
+
+status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
+    ALOGV("setParameters: %s", params.string());
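+    // The parameter string is a ';'-separated list of key=value pairs,
+    // e.g. "rtp-param-local-ip=...;rtp-param-local-port=...".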
+    const char *cparams = params.string();
+    const char *key_start = cparams;
+    for (;;) {
+        const char *equal_pos = strchr(key_start, '=');
+        if (equal_pos == NULL) {
+            ALOGE("Parameters %s miss a value", cparams);
+            return BAD_VALUE;
+        }
+        String8 key(key_start, equal_pos - key_start);
+        TrimString(&key);
+        if (key.length() == 0) {
+            ALOGE("Parameters %s contains an empty key", cparams);
+            return BAD_VALUE;
+        }
+        const char *value_start = equal_pos + 1;
+        const char *semicolon_pos = strchr(value_start, ';');
+        String8 value;
+        if (semicolon_pos == NULL) {
+            value.setTo(value_start);
+        } else {
+            value.setTo(value_start, semicolon_pos - value_start);
+        }
+        if (setParameter(key, value) != OK) {
+            return BAD_VALUE;
+        }
+        if (semicolon_pos == NULL) {
+            break;  // Reaches the end
+        }
+        key_start = semicolon_pos + 1;
+    }
+    return OK;
+}
+
+// Trim both leading and trailing whitespace from the given string.
+//static
+void NuPlayer::RTPSource::TrimString(String8 *s) {
+    size_t num_bytes = s->bytes();
+    const char *data = s->string();
+
+    size_t leading_space = 0;
+    while (leading_space < num_bytes && isspace(data[leading_space])) {
+        ++leading_space;
+    }
+
+    size_t i = num_bytes;
+    while (i > leading_space && isspace(data[i - 1])) {
+        --i;
+    }
+
+    s->setTo(String8(&data[leading_space], i - leading_space));
+}
+
+}  // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.h b/media/libmediaplayerservice/nuplayer/RTPSource.h
new file mode 100644
index 0000000..6c618ec
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.h
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SOURCE_H_
+
+#define RTP_SOURCE_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/Utils.h>
+#include <media/BufferingSettings.h>
+
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+#include "AnotherPacketSource.h"
+#include "APacketSource.h"
+#include "ARTPConnection.h"
+#include "ASessionDescription.h"
+#include "NuPlayerSource.h"
+
+
+
+
+
+
+namespace android {
+
+struct ALooper;
+struct AnotherPacketSource;
+
+struct NuPlayer::RTPSource : public NuPlayer::Source {
+    RTPSource(
+            const sp<AMessage> &notify,
+            const String8& rtpParams);
+
+    virtual status_t getBufferingSettings(
+            BufferingSettings* buffering /* nonnull */) override;
+    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
+    virtual void prepareAsync();
+    virtual void start();
+    virtual void stop();
+    virtual void pause();
+    virtual void resume();
+
+    virtual status_t feedMoreTSData();
+
+    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
+
+    virtual status_t getDuration(int64_t *durationUs);
+    virtual status_t seekTo(
+            int64_t seekTimeUs,
+            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
+
+    void onMessageReceived(const sp<AMessage> &msg);
+
+protected:
+    virtual ~RTPSource();
+
+    virtual sp<MetaData> getFormatMeta(bool audio);
+
+private:
+    enum {
+        kWhatAccessUnit = 'accU',
+        kWhatAccessUnitComplete = 'accu',
+        kWhatDisconnect = 'disc',
+        kWhatEOS = 'eos!',
+        kWhatPollBuffering = 'poll',
+        kWhatSetBufferingSettings = 'sBuS',
+    };
+
+    enum State {
+        DISCONNECTED,
+        CONNECTING,
+        CONNECTED,
+        PAUSED,
+    };
+
+    struct TrackInfo {
+
+        /* SDP of track */
+        bool mIsAudio;
+        int32_t mPayloadType;
+        String8 mMimeType;
+        String8 mCodecName;
+        int32_t mCodecProfile;
+        int32_t mCodecLevel;
+        int32_t mWidth;
+        int32_t mHeight;
+        String8 mLocalIp;
+        String8 mRemoteIp;
+        int32_t mLocalPort;
+        int32_t mRemotePort;
+        int32_t mTimeScale;
+        int32_t mAS;
+
+        /* a copy of TrackInfo in RTSPSource */
+        sp<AnotherPacketSource> mSource;
+        uint32_t mRTPTime;
+        int64_t mNormalPlaytimeUs;
+        bool mNPTMappingValid;
+
+        /* a copy of TrackInfo in MyHandler.h */
+        int mRTPSocket;
+        int mRTCPSocket;
+        uint32_t mFirstSeqNumInSegment;
+        bool mNewSegment;
+        int32_t mAllowedStaleAccessUnits;
+        uint32_t mRTPAnchor;
+        int64_t mNTPAnchorUs;
+        bool mEOSReceived;
+        uint32_t mNormalPlayTimeRTP;
+        int64_t mNormalPlayTimeUs;
+        sp<APacketSource> mPacketSource;
+        List<sp<ABuffer>> mPackets;
+    };
+
+    const String8 mRTPParams;
+    uint32_t mFlags;
+    State mState;
+    status_t mFinalResult;
+
+    // TODO: check whether the three parameters below are still needed.
+    Mutex mBufferingLock;
+    bool mBuffering;
+    bool mInPreparationPhase;
+    Mutex mBufferingSettingsLock;
+    BufferingSettings mBufferingSettings;
+
+    sp<ALooper> mLooper;
+
+    sp<ARTPConnection> mRTPConn;
+
+    Vector<TrackInfo> mTracks;
+    sp<AnotherPacketSource> mAudioTrack;
+    sp<AnotherPacketSource> mVideoTrack;
+
+    int64_t mEOSTimeoutAudio;
+    int64_t mEOSTimeoutVideo;
+
+    /* MyHandler.h */
+    bool mFirstAccessUnit;
+    bool mAllTracksHaveTime;
+    int64_t mNTPAnchorUs;
+    int64_t mMediaAnchorUs;
+    int64_t mLastMediaTimeUs;
+    int64_t mNumAccessUnitsReceived;
+    bool mReceivedFirstRTCPPacket;
+    bool mReceivedFirstRTPPacket;
+    bool mPausing;
+    int32_t mPauseGeneration;
+
+    sp<AnotherPacketSource> getSource(bool audio);
+
+    /* MyHandler.h */
+    void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime);
+    bool addMediaTimestamp(int32_t trackIndex, const TrackInfo *track,
+            const sp<ABuffer> &accessUnit);
+    bool dataReceivedOnAllChannels();
+    void postQueueAccessUnit(size_t trackIndex, const sp<ABuffer> &accessUnit);
+    void postQueueEOS(size_t trackIndex, status_t finalResult);
+    sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale);
+    void onConnected();
+    void onDisconnected(const sp<AMessage> &msg);
+
+    void schedulePollBuffering();
+    void onPollBuffering();
+
+    bool haveSufficientDataOnAllTracks();
+
+    void setEOSTimeout(bool audio, int64_t timeout);
+
+    status_t setParameters(const String8 &params);
+    status_t setParameter(const String8 &key, const String8 &value);
+    static void TrimString(String8 *s);
+
+    DISALLOW_EVIL_CONSTRUCTORS(RTPSource);
+};
+
+}  // namespace android
+
+#endif  // RTP_SOURCE_H_
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index 64eb8b4..d455093 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -247,6 +247,8 @@
 
     // Treat empty track as malformed for MediaRecorder.
     kKeyEmptyTrackMalFormed = 'nemt', // bool (int32_t)
+    kKeySps              = 'sSps', // int32_t, indicates that a buffer is an SPS.
+    kKeyPps              = 'sPps', // int32_t, indicates that a buffer is a PPS.
 };
 
 enum {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index 4bc67e8..13d74e4 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -51,7 +51,52 @@
         return NOT_ENOUGH_DATA;
     }
 
+    sp<ABuffer> buffer = *queue->begin();
+    int32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
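+    // Jitter buffer: translate the wall-clock time elapsed since the first
+    // packet into RTP ticks and hold each access unit for roughly 200 ms
+    // (clock-rate / 5); units that are a further window behind are abandoned
+    // and the expected sequence number is advanced past them.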
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int32_t playedTimeRtp = source->mFirstRtpTime +
+        (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    const int32_t jitterTime = source->mClockRate / 5;  // 200ms
+    int32_t expiredTimeInJb = rtpTime + jitterTime;
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    bool isTooLate = expiredTimeInJb < (playedTimeRtp - jitterTime);
+    ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
+            (long long)nowTime, (long long)playedTime);
+    ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
+            rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (isTooLate) {
+        ALOGV("buffer arrived too late.");
+        ALOGW("start=%lld, now=%lld, played=%lld", (long long)startTime,
+                (long long)nowTime, (long long)playedTime);
+        ALOGW("rtp-time(JB)=%d, played-rtp-time(JB)=%d, exp-rtp-time(JB)=%d diff=%lld isExpired=%d",
+                rtpTime, playedTimeRtp, expiredTimeInJb,
+                ((long long)playedTimeRtp) - expiredTimeInJb, isExpired);
+        ALOGW("expected Seq. NO =%d", buffer->int32Data());
+
+        List<sp<ABuffer> >::iterator it = queue->begin();
+        while (it != queue->end()) {
+            CHECK((*it)->meta()->findInt32("rtp-time", &rtpTime));
+            if (rtpTime + jitterTime >= playedTimeRtp) {
+                mNextExpectedSeqNo = (*it)->int32Data();
+                break;
+            }
+            it++;
+        }
+        source->noticeAbandonBuffer();
+    }
+
     if (mNextExpectedSeqNoValid) {
+        int32_t size = queue->size();
+        int32_t cnt = 0;
         List<sp<ABuffer> >::iterator it = queue->begin();
         while (it != queue->end()) {
             if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
@@ -59,15 +104,18 @@
             }
 
             it = queue->erase(it);
+            cnt++;
         }
 
+        if (cnt > 0) {
+            source->noticeAbandonBuffer(cnt);
+            ALOGW("delete %d of %d buffers", cnt, size);
+        }
         if (queue->empty()) {
             return NOT_ENOUGH_DATA;
         }
     }
 
-    sp<ABuffer> buffer = *queue->begin();
-
     if (!mNextExpectedSeqNoValid) {
         mNextExpectedSeqNoValid = true;
         mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
new file mode 100644
index 0000000..c316471
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -0,0 +1,426 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "AHEVCAssembler"
+#include <utils/Log.h>
+
+#include "AHEVCAssembler.h"
+
+#include "ARTPSource.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+
+#include <stdint.h>
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x20
+#define H265_NALU_SPS 0x21
+#define H265_NALU_PPS 0x22
+#define H265_NALU_AP 0x30
+#define H265_NALU_FU 0x31
+#define H265_NALU_PACI 0x32
+
+
+namespace android {
+
+// static
+AHEVCAssembler::AHEVCAssembler(const sp<AMessage> &notify)
+    : mNotifyMsg(notify),
+      mAccessUnitRTPTime(0),
+      mNextExpectedSeqNoValid(false),
+      mNextExpectedSeqNo(0),
+      mAccessUnitDamaged(false) {
+
+      ALOGV("Constructor");
+}
+
+AHEVCAssembler::~AHEVCAssembler() {
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addNALUnit(
+        const sp<ARTPSource> &source) {
+    List<sp<ABuffer> > *queue = source->queue();
+
+    if (queue->empty()) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    sp<ABuffer> buffer = *queue->begin();
+    int32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", &rtpTime));
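+    // Jitter buffer: keep buffering until the playback position, converted to
+    // RTP ticks, is at least ~200 ms (clock-rate / 5) past this unit's
+    // RTP timestamp.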
+    int64_t startTime = source->mFirstSysTime / 1000;
+    int64_t nowTime = ALooper::GetNowUs() / 1000;
+    int64_t playedTime = nowTime - startTime;
+    int32_t playedTimeRtp = source->mFirstRtpTime +
+        (((uint32_t)playedTime) * (source->mClockRate / 1000));
+    int32_t expiredTimeInJb = rtpTime + (source->mClockRate / 5);
+    bool isExpired = expiredTimeInJb <= (playedTimeRtp);
+    ALOGV("start=%lld, now=%lld, played=%lld", (long long)startTime,
+            (long long)nowTime, (long long)playedTime);
+    ALOGV("rtp-time(JB)=%d, played-rtp-time(JB)=%d, expired-rtp-time(JB)=%d isExpired=%d",
+            rtpTime, playedTimeRtp, expiredTimeInJb, isExpired);
+
+    if (!isExpired) {
+        ALOGV("buffering in jitter buffer.");
+        return NOT_ENOUGH_DATA;
+    }
+
+    if (mNextExpectedSeqNoValid) {
+        List<sp<ABuffer> >::iterator it = queue->begin();
+        while (it != queue->end()) {
+            if ((uint32_t)(*it)->int32Data() >= mNextExpectedSeqNo) {
+                break;
+            }
+
+            it = queue->erase(it);
+        }
+
+        if (queue->empty()) {
+            return NOT_ENOUGH_DATA;
+        }
+    }
+
+    if (!mNextExpectedSeqNoValid) {
+        mNextExpectedSeqNoValid = true;
+        mNextExpectedSeqNo = (uint32_t)buffer->int32Data();
+    } else if ((uint32_t)buffer->int32Data() != mNextExpectedSeqNo) {
+        ALOGV("Not the sequence number I expected");
+
+        return WRONG_SEQUENCE_NUMBER;
+    }
+
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 1 || (data[0] & 0x80)) {
+        // Corrupt.
+
+        ALOGV("Ignoring corrupt buffer.");
+        queue->erase(queue->begin());
+
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
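+    // The HEVC NAL unit type is the six bits following the F bit of the first
+    // payload-header byte: 1..0x2F are single NAL units, 0x30 is an
+    // aggregation packet (AP) and 0x31 is a fragmentation unit (FU).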
+    unsigned nalType = (data[0] >> 1) & H265_NALU_MASK;
+    if (nalType > 0 && nalType < H265_NALU_AP) {
+        addSingleNALUnit(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return OK;
+    } else if (nalType == H265_NALU_FU) {
+        // FU (fragmentation unit)
+        return addFragmentedNALUnit(queue);
+    } else if (nalType == H265_NALU_AP) {
+        // AP (aggregation packet)
+        bool success = addSingleTimeAggregationPacket(buffer);
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return success ? OK : MALFORMED_PACKET;
+    } else if (nalType == 0) {
+        ALOGV("Ignoring undefined nal type.");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return OK;
+    } else {
+        ALOGV("Ignoring unsupported buffer (nalType=%d)", nalType);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+
+        return MALFORMED_PACKET;
+    }
+}
+
+void AHEVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
+    ALOGV("addSingleNALUnit of size %zu", buffer->size());
+#if !LOG_NDEBUG
+    hexdump(buffer->data(), buffer->size());
+#endif
+
+    uint32_t rtpTime;
+    CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+    if (!mNALUnits.empty() && rtpTime != mAccessUnitRTPTime) {
+        submitAccessUnit();
+    }
+    mAccessUnitRTPTime = rtpTime;
+
+    mNALUnits.push_back(buffer);
+}
+
+bool AHEVCAssembler::addSingleTimeAggregationPacket(const sp<ABuffer> &buffer) {
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    if (size < 3) {
+        ALOGV("Discarding too small STAP-A packet.");
+        return false;
+    }
+
+    ++data;
+    --size;
+    while (size >= 2) {
+        size_t nalSize = (data[0] << 8) | data[1];
+
+        if (size < nalSize + 2) {
+            ALOGV("Discarding malformed STAP-A packet.");
+            return false;
+        }
+
+        sp<ABuffer> unit = new ABuffer(nalSize);
+        memcpy(unit->data(), &data[2], nalSize);
+
+        CopyTimes(unit, buffer);
+
+        addSingleNALUnit(unit);
+
+        data += 2 + nalSize;
+        size -= 2 + nalSize;
+    }
+
+    if (size != 0) {
+        ALOGV("Unexpected padding at end of STAP-A packet.");
+    }
+
+    return true;
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::addFragmentedNALUnit(
+        List<sp<ABuffer> > *queue) {
+    CHECK(!queue->empty());
+
+    sp<ABuffer> buffer = *queue->begin();
+    const uint8_t *data = buffer->data();
+    size_t size = buffer->size();
+
+    CHECK(size > 0);
+    /*   H265 payload header is 16 bit
+        0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       |F|     Type  |  Layer ID | TID |
+       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    unsigned indicator = (data[0] >> 1);
+
+    CHECK((indicator & H265_NALU_MASK) == H265_NALU_FU);
+
+    if (size < 2) {
+        ALOGV("Ignoring malformed FU buffer (size = %zu)", size);
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    if (!(data[2] & 0x80)) {
+        // Start bit not set on the first buffer.
+
+        ALOGV("Start bit not set on first buffer");
+
+        queue->erase(queue->begin());
+        ++mNextExpectedSeqNo;
+        return MALFORMED_PACKET;
+    }
+
+    /*  FU header
+        0 1 2 3 4 5 6 7
+       +-+-+-+-+-+-+-+-+
+       |S|E|   Type    |
+       +-+-+-+-+-+-+-+-+
+     */
+    uint32_t nalType = data[2] & H265_NALU_MASK;
+    uint32_t tid = data[1] & 0x7;
+    ALOGV("nalType =%u, tid =%u", nalType, tid);
+
+    uint32_t expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
+    size_t totalSize = size - 3;
+    size_t totalCount = 1;
+    bool complete = false;
+
+    if (data[2] & 0x40) {
+        // Huh? End bit also set on the first buffer.
+
+        ALOGV("Grrr. This isn't fragmented at all.");
+
+        complete = true;
+    } else {
+        List<sp<ABuffer> >::iterator it = ++queue->begin();
+        while (it != queue->end()) {
+            ALOGV("sequence length %zu", totalCount);
+
+            const sp<ABuffer> &buffer = *it;
+
+            const uint8_t *data = buffer->data();
+            size_t size = buffer->size();
+
+            if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
+                ALOGV("sequence not complete, expected seqNo %d, got %d",
+                     expectedSeqNo, (uint32_t)buffer->int32Data());
+
+                return WRONG_SEQUENCE_NUMBER;
+            }
+
+            if (size < 3
+                    || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+                    || (data[2] & H265_NALU_MASK) != nalType
+                    || (data[2] & 0x80)) {
+                ALOGV("Ignoring malformed FU buffer.");
+
+                // Delete the whole start of the FU.
+
+                it = queue->begin();
+                for (size_t i = 0; i <= totalCount; ++i) {
+                    it = queue->erase(it);
+                }
+
+                mNextExpectedSeqNo = expectedSeqNo + 1;
+
+                return MALFORMED_PACKET;
+            }
+
+            totalSize += size - 3;
+            ++totalCount;
+
+            expectedSeqNo = expectedSeqNo + 1;
+
+            if (data[2] & 0x40) {
+                // This is the last fragment.
+                complete = true;
+                break;
+            }
+
+            ++it;
+        }
+    }
+
+    if (!complete) {
+        return NOT_ENOUGH_DATA;
+    }
+
+    mNextExpectedSeqNo = expectedSeqNo;
+
+    // We found all the fragments that make up the complete NAL unit.
+
+    // Leave room for the two-byte payload header. So far totalSize did not
+    // include it.
+    totalSize += 2;
+
+    sp<ABuffer> unit = new ABuffer(totalSize);
+    CopyTimes(unit, *queue->begin());
+
+    unit->data()[0] = (nalType << 1);
+    unit->data()[1] = tid;
+
+    size_t offset = 2;
+    List<sp<ABuffer> >::iterator it = queue->begin();
+    for (size_t i = 0; i < totalCount; ++i) {
+        const sp<ABuffer> &buffer = *it;
+
+        ALOGV("piece #%zu/%zu", i + 1, totalCount);
+#if !LOG_NDEBUG
+        hexdump(buffer->data(), buffer->size());
+#endif
+
+        memcpy(unit->data() + offset, buffer->data() + 3, buffer->size() - 3);
+        offset += buffer->size() - 3;
+
+        it = queue->erase(it);
+    }
+
+    unit->setRange(0, totalSize);
+
+    addSingleNALUnit(unit);
+
+    ALOGV("successfully assembled a NAL unit from fragments.");
+
+    return OK;
+}
+
+void AHEVCAssembler::submitAccessUnit() {
+    CHECK(!mNALUnits.empty());
+
+    ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
+
+    size_t totalSize = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        totalSize += 4 + (*it)->size();
+    }
+
+    sp<ABuffer> accessUnit = new ABuffer(totalSize);
+    size_t offset = 0;
+    for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
+         it != mNALUnits.end(); ++it) {
+        memcpy(accessUnit->data() + offset, "\x00\x00\x00\x01", 4);
+        offset += 4;
+
+        sp<ABuffer> nal = *it;
+        memcpy(accessUnit->data() + offset, nal->data(), nal->size());
+        offset += nal->size();
+    }
+
+    CopyTimes(accessUnit, *mNALUnits.begin());
+
+#if 0
+    printf(mAccessUnitDamaged ? "X" : ".");
+    fflush(stdout);
+#endif
+
+    if (mAccessUnitDamaged) {
+        accessUnit->meta()->setInt32("damaged", true);
+    }
+
+    mNALUnits.clear();
+    mAccessUnitDamaged = false;
+
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setBuffer("access-unit", accessUnit);
+    msg->post();
+}
+
+ARTPAssembler::AssemblyStatus AHEVCAssembler::assembleMore(
+        const sp<ARTPSource> &source) {
+    AssemblyStatus status = addNALUnit(source);
+    if (status == MALFORMED_PACKET) {
+        mAccessUnitDamaged = true;
+    }
+    return status;
+}
+
+void AHEVCAssembler::packetLost() {
+    CHECK(mNextExpectedSeqNoValid);
+    ALOGV("packetLost (expected %d)", mNextExpectedSeqNo);
+
+    ++mNextExpectedSeqNo;
+
+    mAccessUnitDamaged = true;
+}
+
+void AHEVCAssembler::onByeReceived() {
+    sp<AMessage> msg = mNotifyMsg->dup();
+    msg->setInt32("eos", true);
+    msg->post();
+}
+
+}  // namespace android
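
For reference, the reassembly path above rebuilds the two-byte HEVC NAL unit header (RFC 7798) from the Type and TID values it pulled out of the FU packet, which implicitly forces the F bit and LayerId to zero. A minimal standalone sketch of that header math; names and values are illustrative only, not part of the patch:

    #include <cstdint>
    #include <cstdio>

    // Rebuild an HEVC NAL unit header from the fields an FU packet carries.
    // F and LayerId are assumed to be 0, which matches what
    // AHEVCAssembler::addFragmentedNALUnit() writes into the assembled unit.
    static void writeHevcNalHeader(uint8_t *out, uint32_t nalType, uint32_t tid) {
        out[0] = (uint8_t)(nalType << 1);  // F = 0, Type, LayerId high bit = 0
        out[1] = (uint8_t)(tid & 0x7);     // LayerId low bits = 0, TID
    }

    int main() {
        uint8_t hdr[2];
        writeHevcNalHeader(hdr, 19 /* IDR_W_RADL */, 1);
        printf("%02x %02x\n", hdr[0], hdr[1]);  // prints "26 01"
        return 0;
    }
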
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
new file mode 100644
index 0000000..cc20622
--- /dev/null
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef A_HEVC_ASSEMBLER_H_
+
+#define A_HEVC_ASSEMBLER_H_
+
+#include "ARTPAssembler.h"
+
+#include <utils/List.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct ABuffer;
+struct AMessage;
+
+struct AHEVCAssembler : public ARTPAssembler {
+    AHEVCAssembler(const sp<AMessage> &notify);
+
+protected:
+    virtual ~AHEVCAssembler();
+
+    virtual AssemblyStatus assembleMore(const sp<ARTPSource> &source);
+    virtual void onByeReceived();
+    virtual void packetLost();
+
+private:
+    sp<AMessage> mNotifyMsg;
+
+    uint32_t mAccessUnitRTPTime;
+    bool mNextExpectedSeqNoValid;
+    uint32_t mNextExpectedSeqNo;
+    bool mAccessUnitDamaged;
+    List<sp<ABuffer> > mNALUnits;
+
+    AssemblyStatus addNALUnit(const sp<ARTPSource> &source);
+    void addSingleNALUnit(const sp<ABuffer> &buffer);
+    AssemblyStatus addFragmentedNALUnit(List<sp<ABuffer> > *queue);
+    bool addSingleTimeAggregationPacket(const sp<ABuffer> &buffer);
+
+    void submitAccessUnit();
+
+    DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
+};
+
+}  // namespace android
+
+#endif  // A_HEVC_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 574bd7a..8f4df8e 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -454,6 +454,17 @@
 
         mFormat->setInt32(kKeyWidth, width);
         mFormat->setInt32(kKeyHeight, height);
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+        int32_t width, height;
+        if (!sessionDesc->getDimensions(index, PT, &width, &height)) {
+            width = -1;
+            height = -1;
+        }
+
+        mFormat->setInt32(kKeyWidth, width);
+        mFormat->setInt32(kKeyHeight, height);
     } else if (!strncmp(desc.c_str(), "H263-2000/", 10)
             || !strncmp(desc.c_str(), "H263-1998/", 10)) {
         mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 7b36875..12cce00 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -53,6 +53,7 @@
 const int64_t ARTPConnection::kSelectTimeoutUs = 1000LL;
 
 struct ARTPConnection::StreamInfo {
+    bool isIPv6;
     int mRTPSocket;
     int mRTCPSocket;
     sp<ASessionDescription> mSessionDesc;
@@ -63,6 +64,7 @@
     int64_t mNumRTCPPacketsReceived;
     int64_t mNumRTPPacketsReceived;
     struct sockaddr_in mRemoteRTCPAddr;
+    struct sockaddr_in6 mRemoteRTCPAddr6;
 
     bool mIsInjected;
 };
@@ -152,6 +154,101 @@
     TRESPASS();
 }
 
+// static
+void ARTPConnection::MakeRTPSocketPair(
+        int *rtpSocket, int *rtcpSocket, const char *localIp, const char *remoteIp,
+        unsigned localPort, unsigned remotePort) {
+    bool isIPv6 = false;
+    if (strchr(localIp, ':') != NULL)
+        isIPv6 = true;
+
+    *rtpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtpSocket, 0);
+
+    bumpSocketBufferSize(*rtpSocket);
+
+    *rtcpSocket = socket(isIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(*rtcpSocket, 0);
+
+    bumpSocketBufferSize(*rtcpSocket);
+
+    struct sockaddr *addr;
+    struct sockaddr_in addr4;
+    struct sockaddr_in6 addr6;
+
+    if (isIPv6) {
+        addr = (struct sockaddr *)&addr6;
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)localPort);
+    } else {
+        addr = (struct sockaddr *)&addr4;
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(localIp);
+        addr4.sin_port = htons((uint16_t)localPort);
+    }
+
+    int sockopt = 1;
+    setsockopt(*rtpSocket, SOL_SOCKET, SO_REUSEPORT, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(*rtcpSocket, SOL_SOCKET, SO_REUSEPORT, (int *)&sockopt, sizeof(sockopt));
+
+    int sizeSockSt = isIPv6 ? sizeof(addr6) : sizeof(addr4);
+
+    if (bind(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully binded. addr=%s:%d", localIp, localPort);
+    } else {
+        ALOGE("failed to bind rtp socket addr=%s:%d err=%s", localIp, localPort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(localPort + 1);
+    else
+        addr4.sin_port = htons(localPort + 1);
+
+    if (bind(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully binded. addr=%s:%d", localIp, localPort + 1);
+    } else {
+        ALOGE("failed to bind rtcp socket addr=%s:%d err=%s", localIp,
+                localPort + 1, strerror(errno));
+    }
+
+    // Reuse the addr variable for the remote address.
+    if (isIPv6) {
+        memset(&addr6, 0, sizeof(addr6));
+        addr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp, &addr6.sin6_addr);
+        addr6.sin6_port = htons((uint16_t)remotePort);
+    } else {
+        memset(&addr4, 0, sizeof(addr4));
+        addr4.sin_family = AF_INET;
+        addr4.sin_addr.s_addr = inet_addr(remoteIp);
+        addr4.sin_port = htons((uint16_t)remotePort);
+    }
+    if (connect(*rtpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtp socket successfully connected to remote=%s:%d", remoteIp, remotePort);
+    } else {
+        ALOGE("failed to connect rtp socket to remote addr=%s:%d err=%s", remoteIp,
+                remotePort, strerror(errno));
+        return;
+    }
+
+    if (isIPv6)
+        addr6.sin6_port = htons(remotePort + 1);
+    else
+        addr4.sin_port = htons(remotePort + 1);
+
+    if (connect(*rtcpSocket, addr, sizeSockSt) == 0) {
+        ALOGI("rtcp socket successfully connected to remote=%s:%d", remoteIp, remotePort + 1);
+    } else {
+        ALOGE("failed to connect rtcp socket addr=%s:%d err=%s", remoteIp,
+                remotePort + 1, strerror(errno));
+        return;
+    }
+}
+
 void ARTPConnection::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatAddStream:
@@ -211,6 +308,7 @@
     info->mNumRTCPPacketsReceived = 0;
     info->mNumRTPPacketsReceived = 0;
     memset(&info->mRemoteRTCPAddr, 0, sizeof(info->mRemoteRTCPAddr));
+    memset(&info->mRemoteRTCPAddr6, 0, sizeof(info->mRemoteRTCPAddr6));
 
     if (!injected) {
         postPollEvent();
@@ -347,12 +445,21 @@
             if (buffer->size() > 0) {
                 ALOGV("Sending RR...");
 
+                struct sockaddr* pRemoteRTCPAddr;
+                int sizeSockSt;
+                if (s->isIPv6) {
+                    pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
+                    sizeSockSt = sizeof(struct sockaddr_in6);
+                } else {
+                    pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
+                    sizeSockSt = sizeof(struct sockaddr_in);
+                }
+
                 ssize_t n;
                 do {
                     n = sendto(
-                        s->mRTCPSocket, buffer->data(), buffer->size(), 0,
-                        (const struct sockaddr *)&s->mRemoteRTCPAddr,
-                        sizeof(s->mRemoteRTCPAddr));
+                            s->mRTCPSocket, buffer->data(), buffer->size(), 0,
+                            pRemoteRTCPAddr, sizeSockSt);
                 } while (n < 0 && errno == EINTR);
 
                 if (n <= 0) {
@@ -384,9 +491,18 @@
 
     sp<ABuffer> buffer = new ABuffer(65536);
 
+    struct sockaddr *pRemoteRTCPAddr;
+    int sizeSockSt;
+    if (s->isIPv6) {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr6;
+        sizeSockSt = sizeof(struct sockaddr_in6);
+    } else {
+        pRemoteRTCPAddr = (struct sockaddr *)&s->mRemoteRTCPAddr;
+        sizeSockSt = sizeof(struct sockaddr_in);
+    }
     socklen_t remoteAddrLen =
         (!receiveRTP && s->mNumRTCPPacketsReceived == 0)
-            ? sizeof(s->mRemoteRTCPAddr) : 0;
+            ? sizeSockSt : 0;
 
     ssize_t nbytes;
     do {
@@ -395,7 +511,7 @@
             buffer->data(),
             buffer->capacity(),
             0,
-            remoteAddrLen > 0 ? (struct sockaddr *)&s->mRemoteRTCPAddr : NULL,
+            remoteAddrLen > 0 ? pRemoteRTCPAddr : NULL,
             remoteAddrLen > 0 ? &remoteAddrLen : NULL);
     } while (nbytes < 0 && errno == EINTR);
 
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index d5f7c2e..889ec30 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -49,6 +49,13 @@
     // next higher port).
     static void MakePortPair(
             int *rtpSocket, int *rtcpSocket, unsigned *rtpPort);
+    // Creates a pair of UDP datagram sockets bound to the given local IP
+    // and port (the rtpSocket to localPort, the rtcpSocket to localPort + 1)
+    // and connects them to the corresponding remote IP and ports.
+    static void MakeRTPSocketPair(
+            int *rtpSocket, int *rtcpSocket,
+            const char *localIp, const char *remoteIp,
+            unsigned localPort, unsigned remotePort);
 
 protected:
     virtual ~ARTPConnection();
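
The new MakeRTPSocketPair() picks the address family from the address literal itself: any ':' in the local IP means IPv6, otherwise IPv4, and the matching sockaddr_in6/sockaddr_in is filled in before bind()/connect(). A small self-contained sketch of that selection rule; the helper name is illustrative and not part of the patch:

    #include <arpa/inet.h>
    #include <netinet/in.h>
    #include <stdint.h>
    #include <string.h>
    #include <sys/socket.h>

    // Fill an IPv4 or IPv6 sockaddr depending on the address literal, using
    // the same rule as MakeRTPSocketPair(): a ':' in the string means IPv6.
    static socklen_t makeSockAddr(
            const char *ip, unsigned port, struct sockaddr_storage *out) {
        memset(out, 0, sizeof(*out));
        if (strchr(ip, ':') != NULL) {
            struct sockaddr_in6 *a = (struct sockaddr_in6 *)out;
            a->sin6_family = AF_INET6;
            inet_pton(AF_INET6, ip, &a->sin6_addr);
            a->sin6_port = htons((uint16_t)port);
            return sizeof(*a);
        }
        struct sockaddr_in *a = (struct sockaddr_in *)out;
        a->sin_family = AF_INET;
        a->sin_addr.s_addr = inet_addr(ip);
        a->sin_port = htons((uint16_t)port);
        return sizeof(*a);
    }

    int main() {
        struct sockaddr_storage ss;
        return makeSockAddr("2001:db8::1", 50000, &ss)
                == sizeof(struct sockaddr_in6) ? 0 : 1;
    }
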
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index f5f8128..e271ac1 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -22,6 +22,7 @@
 
 #include "AAMRAssembler.h"
 #include "AAVCAssembler.h"
+#include "AHEVCAssembler.h"
 #include "AH263Assembler.h"
 #include "AMPEG2TSAssembler.h"
 #include "AMPEG4AudioAssembler.h"
@@ -41,7 +42,11 @@
         uint32_t id,
         const sp<ASessionDescription> &sessionDesc, size_t index,
         const sp<AMessage> &notify)
-    : mID(id),
+    : mFirstSeqNumber(0),
+      mFirstRtpTime(0),
+      mFirstSysTime(0),
+      mClockRate(0),
+      mID(id),
       mHighestSeqNumber(0),
       mPrevExpected(0),
       mBaseSeqNumber(0),
@@ -61,6 +66,9 @@
     if (!strncmp(desc.c_str(), "H264/", 5)) {
         mAssembler = new AAVCAssembler(notify);
         mIssueFIRRequests = true;
+    } else if (!strncmp(desc.c_str(), "H265/", 5)) {
+        mAssembler = new AHEVCAssembler(notify);
+        mIssueFIRRequests = true;
     } else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
         mAssembler = new AMPEG4AudioAssembler(notify, params);
     } else if (!strncmp(desc.c_str(), "H263-1998/", 10)
@@ -112,9 +120,16 @@
 bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
     uint32_t seqNum = (uint32_t)buffer->int32Data();
 
-    if (mNumBuffersReceived++ == 0) {
+    if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
+        int32_t firstRtpTime;
+        CHECK(buffer->meta()->findInt32("rtp-time", &firstRtpTime));
+        mFirstSysTime = ALooper::GetNowUs();
         mHighestSeqNumber = seqNum;
         mBaseSeqNumber = seqNum;
+        mFirstRtpTime = firstRtpTime;
+        ALOGV("first-rtp arrived: first-rtp-time=%d, sys-time=%lld, seq-num=%u",
+                mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber);
+        mClockRate = 90000;
         mQueue.push_back(buffer);
         return true;
     }
@@ -306,6 +321,9 @@
     buffer->setRange(buffer->offset(), buffer->size() + 32);
 }
 
+void ARTPSource::noticeAbandonBuffer(int cnt) {
+    mNumBuffersReceived -= cnt;
+}
 }  // namespace android
 
 
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index f44e83f..12de18d 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -46,6 +46,13 @@
     void addReceiverReport(const sp<ABuffer> &buffer);
     void addFIR(const sp<ABuffer> &buffer);
 
+    void noticeAbandonBuffer(int cnt=1);
+
+    int32_t mFirstSeqNumber;
+    int32_t mFirstRtpTime;
+    int64_t mFirstSysTime;
+    int32_t mClockRate;
+
 private:
     uint32_t mID;
     uint32_t mHighestSeqNumber;
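
The four new fields above anchor the jitter buffer: the RTP timestamp and local arrival time of the first packet, plus the (currently hard-coded) 90 kHz clock rate, give a mapping from RTP time to the local clock. The consuming code is not part of these hunks, so the following is only a sketch of how such anchors are typically used:

    #include <cstdint>
    #include <cstdio>

    // Map an RTP timestamp to the local clock using first-packet anchors like
    // mFirstRtpTime / mFirstSysTime / mClockRate.
    static int64_t expectedLocalTimeUs(
            uint32_t rtpTime, uint32_t firstRtpTime,
            int64_t firstSysTimeUs, int32_t clockRate) {
        // RTP timestamps wrap at 2^32; unsigned subtraction absorbs one wrap.
        uint32_t elapsedTicks = rtpTime - firstRtpTime;
        return firstSysTimeUs + ((int64_t)elapsedTicks * 1000000ll) / clockRate;
    }

    int main() {
        // With a 90 kHz clock, 9000 ticks is 100 ms after the first packet.
        printf("%lld\n",
               (long long)expectedLocalTimeUs(9000, 0, 1000000, 90000));
        return 0;  // prints 1100000
    }
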
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 58d6086..65c8189 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -35,6 +35,24 @@
 #define PT      97
 #define PT_STR  "97"
 
+#define H264_NALU_MASK 0x1F
+#define H264_NALU_SPS 0x7
+#define H264_NALU_PPS 0x8
+#define H264_NALU_IFRAME 0x5
+#define H264_NALU_PFRAME 0x1
+
+#define H265_NALU_MASK 0x3F
+#define H265_NALU_VPS 0x40
+#define H265_NALU_SPS 0x42
+#define H265_NALU_PPS 0x44
+
+#define UDP_HEADER_SIZE 8
+#define RTP_HEADER_SIZE 12
+#define RTP_HEADER_EXT_SIZE 1
+#define RTP_FU_HEADER_SIZE 2
+#define RTP_PAYLOAD_ROOM_SIZE 140
+
 namespace android {
 
 // static const size_t kMaxPacketSize = 65507;  // maximum payload in UDP over IP
@@ -50,13 +68,16 @@
       mLooper(new ALooper),
       mReflector(new AHandlerReflector<ARTPWriter>(this)) {
     CHECK_GE(fd, 0);
+    mIsIPv6 = false;
 
     mLooper->setName("rtp writer");
     mLooper->registerHandler(mReflector);
     mLooper->start();
 
-    mSocket = socket(AF_INET, SOCK_DGRAM, 0);
-    CHECK_GE(mSocket, 0);
+    mRTPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
 
     memset(mRTPAddr.sin_zero, 0, sizeof(mRTPAddr.sin_zero));
     mRTPAddr.sin_family = AF_INET;
@@ -72,6 +93,40 @@
 
     mRTCPAddr = mRTPAddr;
     mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
+
+#if LOG_TO_FILES
+    mRTPFd = open(
+            "/data/misc/rtpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTPFd, 0);
+
+    mRTCPFd = open(
+            "/data/misc/rtcpout.bin",
+            O_WRONLY | O_CREAT | O_TRUNC,
+            0644);
+    CHECK_GE(mRTCPFd, 0);
+#endif
+}
+
+ARTPWriter::ARTPWriter(int fd, String8& localIp, int localPort, String8& remoteIp,
+    int remotePort)
+    : mFlags(0),
+      mFd(dup(fd)),
+      mLooper(new ALooper),
+      mReflector(new AHandlerReflector<ARTPWriter>(this)) {
+    CHECK_GE(fd, 0);
+    mIsIPv6 = false;
+
+    mLooper->setName("rtp writer");
+    mLooper->registerHandler(mReflector);
+    mLooper->start();
+
+    makeSocketPairAndBind(localIp, localPort, remoteIp, remotePort);
+    mSPSBuf = NULL;
+    mPPSBuf = NULL;
 
 #if LOG_TO_FILES
     mRTPFd = open(
@@ -97,11 +152,24 @@
     mRTPFd = -1;
 #endif
 
-    close(mSocket);
-    mSocket = -1;
+    close(mRTPSocket);
+    mRTPSocket = -1;
+
+    close(mRTCPSocket);
+    mRTCPSocket = -1;
 
     close(mFd);
     mFd = -1;
+
+    if(mSPSBuf != NULL) {
+        mSPSBuf->release();
+        mSPSBuf = NULL;
+    }
+
+    if(mPPSBuf != NULL) {
+        mPPSBuf->release();
+        mPPSBuf = NULL;
+    }
 }
 
 status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
@@ -123,7 +191,7 @@
     mFlags &= ~kFlagEOS;
     mSourceID = rand();
     mSeqNo = UniformRand(65536);
-    mRTPTimeBase = rand();
+    mRTPTimeBase = 0;
     mNumRTPSent = 0;
     mNumRTPOctetsSent = 0;
     mLastRTPTime = 0;
@@ -136,6 +204,8 @@
     mMode = INVALID;
     if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
         mMode = H264;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+        mMode = H265;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
         mMode = H263;
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
@@ -173,9 +243,12 @@
     return OK;
 }
 
-static void StripStartcode(MediaBufferBase *buffer) {
+// Returns the size of the SPS if another NAL unit follows it in the buffer.
+static uint32_t StripStartcode(MediaBufferBase *buffer) {
+    uint32_t nalSize = 0;
+
     if (buffer->range_length() < 4) {
-        return;
+        return 0;
     }
 
     const uint8_t *ptr =
@@ -185,6 +258,56 @@
         buffer->set_range(
                 buffer->range_offset() + 4, buffer->range_length() - 4);
     }
+
+    ptr = (const uint8_t *)buffer->data() + buffer->range_offset();
+
+    if ((*ptr & H264_NALU_MASK) == H264_NALU_SPS) {
+        for (uint32_t i = 0; i + 4 <= buffer->range_length(); i++) {
+            if (!memcmp(ptr + i, "\x00\x00\x00\x01", 4)) {
+                // Now, we found one more NAL unit in the media buffer.
+                // Mostly, it will be a PPS.
+                nalSize = i;
+                ALOGV("SPS found. size=%d", nalSize);
+            }
+        }
+    }
+
+    return nalSize;
+}
+
+static void SpsPpsParser(MediaBufferBase *mediaBuffer,
+    MediaBufferBase **spsBuffer, MediaBufferBase **ppsBuffer, uint32_t spsSize) {
+
+    if (mediaBuffer == NULL || mediaBuffer->range_length() < 4)
+        return;
+
+    if((*spsBuffer) != NULL) {
+        (*spsBuffer)->release();
+        (*spsBuffer) = NULL;
+    }
+
+    if((*ppsBuffer) != NULL) {
+        (*ppsBuffer)->release();
+        (*ppsBuffer) = NULL;
+    }
+
+    // We have the SPS and PPS here, but the start code of the SPS has already
+    // been stripped.
+    (*spsBuffer) = MediaBufferBase::Create(spsSize);
+    int32_t ppsSize = mediaBuffer->range_length() - spsSize - 4/*startcode*/;
+    (*ppsBuffer) = MediaBufferBase::Create(ppsSize);
+    memcpy((*spsBuffer)->data(),
+            (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
+            spsSize);
+
+    if (ppsSize > 0) {
+        ALOGV("PPS found. size=%d", (int)ppsSize);
+        mediaBuffer->set_range(mediaBuffer->range_offset() + spsSize + 4/*startcode*/,
+                mediaBuffer->range_length() - spsSize - 4/*startcode*/);
+        memcpy((*ppsBuffer)->data(),
+                (const uint8_t *)mediaBuffer->data() + mediaBuffer->range_offset(),
+                ppsSize);
+    }
 }
 
 void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {
@@ -280,8 +403,15 @@
         ALOGV("read buffer of size %zu", mediaBuf->range_length());
 
         if (mMode == H264) {
+            uint32_t spsSize = 0;
+            if ((spsSize = StripStartcode(mediaBuf)) > 0) {
+                SpsPpsParser(mediaBuf, &mSPSBuf, &mPPSBuf, spsSize);
+            } else {
+                sendAVCData(mediaBuf);
+            }
+        } else if (mMode == H265) {
             StripStartcode(mediaBuf);
-            sendAVCData(mediaBuf);
+            sendHEVCData(mediaBuf);
         } else if (mMode == H263) {
             sendH263Data(mediaBuf);
         } else if (mMode == AMR_NB || mMode == AMR_WB) {
@@ -309,10 +439,25 @@
 }
 
 void ARTPWriter::send(const sp<ABuffer> &buffer, bool isRTCP) {
-    ssize_t n = sendto(
-            mSocket, buffer->data(), buffer->size(), 0,
-            (const struct sockaddr *)(isRTCP ? &mRTCPAddr : &mRTPAddr),
-            sizeof(mRTCPAddr));
+    int sizeSockSt;
+    struct sockaddr *remAddr;
+
+    if (mIsIPv6) {
+        sizeSockSt = sizeof(struct sockaddr_in6);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr6;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr6;
+    } else {
+        sizeSockSt = sizeof(struct sockaddr_in);
+        if (isRTCP)
+            remAddr = (struct sockaddr *)&mRTCPAddr;
+        else
+            remAddr = (struct sockaddr *)&mRTPAddr;
+    }
+
+    ssize_t n = sendto(isRTCP ? mRTCPSocket : mRTPSocket,
+            buffer->data(), buffer->size(), 0, remAddr, sizeSockSt);
 
     CHECK_EQ(n, (ssize_t)buffer->size());
 
@@ -463,7 +608,7 @@
         sdp.append("m=audio ");
     }
 
-    sdp.append(AStringPrintf("%d", ntohs(mRTPAddr.sin_port)));
+    sdp.append(AStringPrintf("%d", mIsIPv6 ? ntohs(mRTPAddr6.sin6_port) : ntohs(mRTPAddr.sin_port)));
     sdp.append(
           " RTP/AVP " PT_STR "\r\n"
           "b=AS 320000\r\n"
@@ -569,6 +714,151 @@
     send(buffer, true /* isRTCP */);
 }
 
+void ARTPWriter::sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs) {
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    if ((mediaData[0] & H264_NALU_MASK) != H264_NALU_IFRAME)
+        return;
+
+    if (mSPSBuf != NULL) {
+        mSPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mSPSBuf->meta_data().setInt32(kKeySps, 1);
+        sendAVCData(mSPSBuf);
+    }
+
+    if (mPPSBuf != NULL) {
+        mPPSBuf->meta_data().setInt64(kKeyTime, timeUs);
+        mPPSBuf->meta_data().setInt32(kKeyPps, 1);
+        sendAVCData(mPPSBuf);
+    }
+}
+
+void ARTPWriter::sendHEVCData(MediaBufferBase *mediaBuf) {
+    // 12 bytes RTP header + 2 bytes HEVC payload header + 1 byte FU header.
+    CHECK_GE(kMaxPacketSize, 12u + 3u);
+
+    int64_t timeUs;
+    CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+    uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+
+    const uint8_t *mediaData =
+        (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+    sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+
+    if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
+            <= buffer->capacity()) {
+        // The data fits into a single packet
+        uint8_t *data = buffer->data();
+        data[0] = 0x80;
+        data[1] = (1 << 7) | PT;  // M-bit
+        data[2] = (mSeqNo >> 8) & 0xff;
+        data[3] = mSeqNo & 0xff;
+        data[4] = rtpTime >> 24;
+        data[5] = (rtpTime >> 16) & 0xff;
+        data[6] = (rtpTime >> 8) & 0xff;
+        data[7] = rtpTime & 0xff;
+        data[8] = mSourceID >> 24;
+        data[9] = (mSourceID >> 16) & 0xff;
+        data[10] = (mSourceID >> 8) & 0xff;
+        data[11] = mSourceID & 0xff;
+
+        memcpy(&data[12],
+               mediaData, mediaBuf->range_length());
+
+        buffer->setRange(0, mediaBuf->range_length() + 12);
+
+        send(buffer, false /* isRTCP */);
+
+        ++mSeqNo;
+        ++mNumRTPSent;
+        mNumRTPOctetsSent += buffer->size() - 12;
+    } else {
+        // FU-A
+
+        unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+        ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+        size_t offset = 2;  // the H265 payload header is 16 bits
+
+        bool firstPacket = true;
+        while (offset < mediaBuf->range_length()) {
+            size_t size = mediaBuf->range_length() - offset;
+            bool lastPacket = true;
+            if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
+                    RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+                lastPacket = false;
+                size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+            }
+
+            uint8_t *data = buffer->data();
+            data[0] = 0x80;
+            data[1] = (lastPacket ? (1 << 7) : 0x00) | PT;  // M-bit
+            data[2] = (mSeqNo >> 8) & 0xff;
+            data[3] = mSeqNo & 0xff;
+            data[4] = rtpTime >> 24;
+            data[5] = (rtpTime >> 16) & 0xff;
+            data[6] = (rtpTime >> 8) & 0xff;
+            data[7] = rtpTime & 0xff;
+            data[8] = mSourceID >> 24;
+            data[9] = (mSourceID >> 16) & 0xff;
+            data[10] = (mSourceID >> 8) & 0xff;
+            data[11] = mSourceID & 0xff;
+
+            /*  H265 payload header is 16 bit
+                 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |F|     Type  |  Layer ID | TID |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+            // Keep the F bit and the high bit of LayerId from the first byte
+            // of the H265 payload header; clear the Type field.
+            data[12] = mediaData[0] & 0x81;
+            // Fill in Type as FU (49 == 0x31).
+            data[12] = data[12] | (0x31 << 1);
+            data[13] = mediaData[1];
+
+            ALOGV("H265 FU packet payload header 0x%x %x", data[12], data[13]);
+
+            CHECK(!firstPacket || !lastPacket);
+            /*
+                FU header
+                0 1 2 3 4 5 6 7
+               +-+-+-+-+-+-+-+-+
+               |S|E|   Type    |
+               +-+-+-+-+-+-+-+-+
+            */
+
+            data[14] =
+                (firstPacket ? 0x80 : 0x00)
+                | (lastPacket ? 0x40 : 0x00)
+                | (nalType & H265_NALU_MASK);
+            ALOGV("H265 FU indicator 0x%x", data[14]);
+
+            memcpy(&data[15], &mediaData[offset], size);
+
+            buffer->setRange(0, 15 + size);
+
+            send(buffer, false /* isRTCP */);
+
+            ++mSeqNo;
+            ++mNumRTPSent;
+            mNumRTPOctetsSent += buffer->size() - 12;
+
+            firstPacket = false;
+            offset += size;
+        }
+    }
+
+    mLastRTPTime = rtpTime;
+    mLastNTPTime = GetNowNTP();
+}
+
 void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
     // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
     CHECK_GE(kMaxPacketSize, 12u + 2u);
@@ -576,17 +866,30 @@
     int64_t timeUs;
     CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
 
+    sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
     uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
 
     const uint8_t *mediaData =
         (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
 
+    int32_t sps, pps;
+    bool isSpsPps = false;
+    if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+            mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+        isSpsPps = true;
+    }
+
     sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-    if (mediaBuf->range_length() + 12 <= buffer->capacity()) {
+    if (mediaBuf->range_length() + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE
+            <= buffer->capacity()) {
         // The data fits into a single packet
         uint8_t *data = buffer->data();
         data[0] = 0x80;
-        data[1] = (1 << 7) | PT;  // M-bit
+        if (isSpsPps)
+            data[1] = PT;  // Marker bit should not be set in case of sps/pps
+        else
+            data[1] = (1 << 7) | PT;
         data[2] = (mSeqNo >> 8) & 0xff;
         data[3] = mSeqNo & 0xff;
         data[4] = rtpTime >> 24;
@@ -618,9 +921,11 @@
         while (offset < mediaBuf->range_length()) {
             size_t size = mediaBuf->range_length() - offset;
             bool lastPacket = true;
-            if (size + 12 + 2 > buffer->capacity()) {
+            if (size + UDP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_FU_HEADER_SIZE +
+                    RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
                 lastPacket = false;
-                size = buffer->capacity() - 12 - 2;
+                size = buffer->capacity() - UDP_HEADER_SIZE - RTP_HEADER_SIZE -
+                    RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
             }
 
             uint8_t *data = buffer->data();
@@ -834,5 +1139,81 @@
     mLastNTPTime = GetNowNTP();
 }
 
+void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
+        String8& remoteIp, int remotePort) {
+    if (localIp.contains(":"))
+        mIsIPv6 = true;
+    else
+        mIsIPv6 = false;
+
+    mRTPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTPSocket, 0);
+    mRTCPSocket = socket(mIsIPv6 ? AF_INET6 : AF_INET, SOCK_DGRAM, 0);
+    CHECK_GE(mRTCPSocket, 0);
+
+    int sockopt = 1;
+    setsockopt(mRTPSocket, SOL_SOCKET, SO_REUSEPORT, (int *)&sockopt, sizeof(sockopt));
+    setsockopt(mRTCPSocket, SOL_SOCKET, SO_REUSEPORT, (int *)&sockopt, sizeof(sockopt));
+
+    if (mIsIPv6) {
+        memset(&mLocalAddr6, 0, sizeof(mLocalAddr6));
+        memset(&mRTPAddr6, 0, sizeof(mRTPAddr6));
+        memset(&mRTCPAddr6, 0, sizeof(mRTCPAddr6));
+
+        mLocalAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, localIp.string(), &mLocalAddr6.sin6_addr);
+        mLocalAddr6.sin6_port = htons((uint16_t)localPort);
+
+        mRTPAddr6.sin6_family = AF_INET6;
+        inet_pton(AF_INET6, remoteIp.string(), &mRTPAddr6.sin6_addr);
+        mRTPAddr6.sin6_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr6 = mRTPAddr6;
+        mRTCPAddr6.sin6_port = htons((uint16_t)(remotePort + 1));
+    } else {
+        memset(&mLocalAddr, 0, sizeof(mLocalAddr));
+        memset(&mRTPAddr, 0, sizeof(mRTPAddr));
+        memset(&mRTCPAddr, 0, sizeof(mRTCPAddr));
+
+        mLocalAddr.sin_family = AF_INET;
+        mLocalAddr.sin_addr.s_addr = inet_addr(localIp.string());
+        mLocalAddr.sin_port = htons((uint16_t)localPort);
+
+        mRTPAddr.sin_family = AF_INET;
+        mRTPAddr.sin_addr.s_addr = inet_addr(remoteIp.string());
+        mRTPAddr.sin_port = htons((uint16_t)remotePort);
+
+        mRTCPAddr = mRTPAddr;
+        mRTCPAddr.sin_port = htons((uint16_t)(remotePort + 1));
+    }
+
+    struct sockaddr *localAddr = mIsIPv6 ?
+        (struct sockaddr*)&mLocalAddr6 : (struct sockaddr*)&mLocalAddr;
+
+    int sizeSockSt = mIsIPv6 ? sizeof(mLocalAddr6) : sizeof(mLocalAddr);
+
+    if (bind(mRTPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtp %s:%d err=%s", localIp.string(), localPort, strerror(errno));
+    } else {
+        ALOGI("succeed to bind rtp %s:%d", localIp.string(), localPort);
+    }
+
+    if (mIsIPv6)
+        mLocalAddr6.sin6_port = htons((uint16_t)(localPort + 1));
+    else
+        mLocalAddr.sin_port = htons((uint16_t)(localPort + 1));
+
+    if (bind(mRTCPSocket, localAddr, sizeSockSt) == -1) {
+        ALOGE("failed to bind rtcp %s:%d err=%s", localIp.string(), localPort + 1, strerror(errno));
+    } else {
+        ALOGI("succeed to bind rtcp %s:%d", localIp.string(), localPort + 1);
+    }
+
+    if (mIsIPv6)
+        mLocalAddr6.sin6_port = htons((uint16_t)localPort);
+    else
+        mLocalAddr.sin_port = htons((uint16_t)localPort);
+}
+
 }  // namespace android
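
When a NAL unit does not fit into one packet, sendHEVCData() turns the original two-byte HEVC NAL unit header into the payload header of each FU packet: F and LayerId are carried over, the Type field is replaced by 49 (FU), and the real type moves into the one-byte FU header; the RTP timestamp itself comes from the 90 kHz video clock (timeUs * 9 / 100). A standalone sketch of the header transformation, with illustrative values not taken from the patch:

    #include <cstdint>
    #include <cstdio>

    // Build the two-byte payload header of an HEVC FU packet from the original
    // NAL unit header, the way sendHEVCData() does.
    static void makeFuPayloadHeader(const uint8_t nalHdr[2], uint8_t out[2]) {
        out[0] = (uint8_t)((nalHdr[0] & 0x81) | (49 << 1));  // F | Type=49 | LayerId msb
        out[1] = nalHdr[1];                                  // LayerId lsbs | TID
    }

    int main() {
        const uint8_t idrHdr[2] = { 0x26, 0x01 };  // IDR_W_RADL, TID = 1
        uint8_t fu[2];
        makeFuPayloadHeader(idrHdr, fu);
        printf("%02x %02x\n", fu[0], fu[1]);  // prints "62 01"
        return 0;
    }
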
 
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 2f13486..46df94b 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -36,6 +36,8 @@
 
 struct ARTPWriter : public MediaWriter {
     explicit ARTPWriter(int fd);
+    explicit ARTPWriter(int fd, String8& localIp, int localPort,
+                                String8& remoteIp, int remotePort);
 
     virtual status_t addSource(const sp<MediaSource> &source);
     virtual bool reachedEOS();
@@ -76,14 +78,22 @@
     sp<ALooper> mLooper;
     sp<AHandlerReflector<ARTPWriter> > mReflector;
 
-    int mSocket;
+    bool mIsIPv6;
+    int mRTPSocket, mRTCPSocket;
+    struct sockaddr_in mLocalAddr;
     struct sockaddr_in mRTPAddr;
     struct sockaddr_in mRTCPAddr;
+    struct sockaddr_in6 mLocalAddr6;
+    struct sockaddr_in6 mRTPAddr6;
+    struct sockaddr_in6 mRTCPAddr6;
 
     AString mProfileLevel;
     AString mSeqParamSet;
     AString mPicParamSet;
 
+    MediaBufferBase *mSPSBuf;
+    MediaBufferBase *mPPSBuf;
+
     uint32_t mSourceID;
     uint32_t mSeqNo;
     uint32_t mRTPTimeBase;
@@ -96,6 +106,7 @@
 
     enum {
         INVALID,
+        H265,
         H264,
         H263,
         AMR_NB,
@@ -114,11 +125,14 @@
     void dumpSessionDesc();
 
     void sendBye();
+    void sendSPSPPSIfIFrame(MediaBufferBase *mediaBuf, int64_t timeUs);
+    void sendHEVCData(MediaBufferBase *mediaBuf);
     void sendAVCData(MediaBufferBase *mediaBuf);
     void sendH263Data(MediaBufferBase *mediaBuf);
     void sendAMRData(MediaBufferBase *mediaBuf);
 
     void send(const sp<ABuffer> &buffer, bool isRTCP);
+    void makeSocketPairAndBind(String8& localIp, int localPort, String8& remoteIp, int remotePort);
 
     DISALLOW_EVIL_CONSTRUCTORS(ARTPWriter);
 };
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 2b42040..d8fde76 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -345,5 +345,64 @@
     return *npt2 > *npt1;
 }
 
+// static
+void ASessionDescription::SDPStringFactory(AString &sdp,
+    const char *ip, bool isAudio, unsigned port, unsigned payloadType,
+    unsigned as, const char *codec, const char *fmtp, int32_t width, int32_t height)
+{
+    bool isIPv4 = (AString(ip).find(":") < 0);
+    sdp.clear();
+    sdp.append("v=0\r\n");
+
+    sdp.append("a=range:npt=now-\r\n");
+
+    sdp.append("m=");
+    sdp.append(isAudio ? "audio " : "video ");
+    sdp.append(port);
+    sdp.append(" RTP/AVP ");
+    sdp.append(payloadType);
+    sdp.append("\r\n");
+
+    sdp.append("c= IN IP");
+    if(isIPv4)
+     sdp.append("4 ");
+    else
+     sdp.append("6 ");
+    sdp.append(ip);
+    sdp.append("\r\n");
+
+    sdp.append("b=AS:");
+    sdp.append(as > 0 ? as : 960);
+    sdp.append("\r\n");
+
+    sdp.append("a=rtpmap:");
+    sdp.append(payloadType);
+    sdp.append(" ");
+    sdp.append(codec);
+    sdp.append("/");
+    sdp.append(isAudio ? "8000" : "90000");
+    sdp.append("\r\n");
+
+    if(fmtp != NULL) {
+        sdp.append("a=fmtp:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(fmtp);
+        sdp.append("\r\n");
+    }
+
+    if(width > 0 && height > 0) {
+        sdp.append("a=framesize:");
+        sdp.append(payloadType);
+        sdp.append(" ");
+        sdp.append(width);
+        sdp.append("-");
+        sdp.append(height);
+        sdp.append("\r\n");
+    }
+
+    ALOGV("SDPStringFactory => %s", sdp.c_str());
+}
+
 }  // namespace android
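
As a rough illustration of what the new factory produces, here is a hypothetical call site (not part of the patch; every argument value is made up) together with the approximate SDP it would emit for an H265 video stream:

    #include <media/stagefright/foundation/AString.h>

    #include "ASessionDescription.h"

    void buildVideoSdp(android::AString *out) {
        android::ASessionDescription::SDPStringFactory(
                *out, "192.0.2.1", false /* isAudio */, 50000 /* port */,
                97 /* payloadType */, 960 /* as, kbps */, "H265",
                NULL /* fmtp */, 640 /* width */, 480 /* height */);
        // *out now holds something along the lines of:
        //   v=0
        //   a=range:npt=now-
        //   m=video 50000 RTP/AVP 97
        //   c=IN IP4 192.0.2.1
        //   b=AS:960
        //   a=rtpmap:97 H265/90000
        //   a=framesize:97 640-480
    }
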
 
diff --git a/media/libstagefright/rtsp/ASessionDescription.h b/media/libstagefright/rtsp/ASessionDescription.h
index b462983..bd92916 100644
--- a/media/libstagefright/rtsp/ASessionDescription.h
+++ b/media/libstagefright/rtsp/ASessionDescription.h
@@ -63,6 +63,9 @@
     // i.e. we have a fixed duration, otherwise this is live streaming.
     static bool parseNTPRange(const char *s, float *npt1, float *npt2);
 
+    static void SDPStringFactory(AString &sdp, const char *ip, bool isAudio, unsigned port,
+        unsigned payloadType, unsigned as, const char *codec, const char *fmtp = NULL,
+        int32_t width = 0, int32_t height = 0);
 protected:
     virtual ~ASessionDescription();
 
diff --git a/media/libstagefright/rtsp/Android.bp b/media/libstagefright/rtsp/Android.bp
index 29e908e..d50f774 100644
--- a/media/libstagefright/rtsp/Android.bp
+++ b/media/libstagefright/rtsp/Android.bp
@@ -23,6 +23,7 @@
     srcs: [
         "AAMRAssembler.cpp",
         "AAVCAssembler.cpp",
+        "AHEVCAssembler.cpp",
         "AH263Assembler.cpp",
         "AMPEG2TSAssembler.cpp",
         "AMPEG4AudioAssembler.cpp",